From 537a6e883612607838c6f8c304ae45fdaab2b0f7 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Fri, 14 Nov 2025 16:17:26 -0500 Subject: [PATCH 01/14] First commit of the devtools work. This will configure and launch a stack. It isn't grabbing some things correctly, because of some more env-specific configuration that needs to be done. However, it's a solid start. --- Makefile | 6 + dev-local/.env.local | 77 +++++++++++ dev-local/Dockerfile.local | 31 +++++ dev-local/Makefile | 20 +++ dev-local/README.md | 121 ++++++++++++++++++ dev-local/build-local.sh | 10 ++ dev-local/check-env-postconditions.bash | 15 +++ dev-local/check-env-preconditions.bash | 15 +++ dev-local/docker-compose-local.yaml | 72 +++++++++++ .../Dockerfile.msls} | 0 dev-local/msls-local/Makefile | 7 + dev-local/{ => msls-local}/app.py | 0 dev-local/{ => msls-local}/requirements.txt | 0 .../{ => msls-local}/templates/login.html | 0 dev-local/{ => msls-local}/users.csv | 0 dev-local/retrieve-certs.bash | 58 +++++++++ dev-local/retrieve-salt.bash | 39 ++++++ dev-local/run-appropriate-stack.bash | 45 +++++++ dev-local/start-local.sh | 85 ++++++++++++ 19 files changed, 601 insertions(+) create mode 100644 dev-local/.env.local create mode 100644 dev-local/Dockerfile.local create mode 100644 dev-local/Makefile create mode 100644 dev-local/README.md create mode 100755 dev-local/build-local.sh create mode 100755 dev-local/check-env-postconditions.bash create mode 100755 dev-local/check-env-preconditions.bash create mode 100644 dev-local/docker-compose-local.yaml rename dev-local/{Dockerfile => msls-local/Dockerfile.msls} (100%) create mode 100644 dev-local/msls-local/Makefile rename dev-local/{ => msls-local}/app.py (100%) rename dev-local/{ => msls-local}/requirements.txt (100%) rename dev-local/{ => msls-local}/templates/login.html (100%) rename dev-local/{ => msls-local}/users.csv (100%) create mode 100755 dev-local/retrieve-certs.bash create mode 100755 dev-local/retrieve-salt.bash create mode 100755 
dev-local/run-appropriate-stack.bash create mode 100755 dev-local/start-local.sh diff --git a/Makefile b/Makefile index 0f0c0c7fa..ba022d225 100644 --- a/Makefile +++ b/Makefile @@ -15,3 +15,9 @@ reqs-install: reqs-install-dev: pip install -r requirements/requirements.dev.txt --no-index --find-links ./vendor/ + +build-local: + cd dev-local ; make build-local ; cd .. + +run-local: + cd dev-local ; make run-local ; cd .. \ No newline at end of file diff --git a/dev-local/.env.local b/dev-local/.env.local new file mode 100644 index 000000000..648797bcc --- /dev/null +++ b/dev-local/.env.local @@ -0,0 +1,77 @@ +################################################################################ +# DEVELOPER VARIABLES +# You may want to tweak these any given day of the week. + +## enable debugpy remote debugging (on port 5678) +# 20251113 MCJ This clearly works, but it does not seem to be used anywhere +# that would actually affect application behavior. :confused: +BB20_ENABLE_REMOTE_DEBUG=true +BB20_REMOTE_DEBUG_WAIT_ATTACH=false +# Setting this to `false` will disable tests that run against the live BFD server. +# To run them locally, you need to be authenticated and on the VPN. +RUN_ONLINE_TESTS=true +# You probably shouldn't touch these. +#DJANGO_FHIR_CERTSTORE_REL=docker-compose/certstore +DJANGO_FHIR_CERTSTORE=/certstore + +################################################################################ +# AWS CREDENTIALS +# These values must be loaded into the local environment at the time the +# stack is started. That implies (perhaps) a `kion s` or similar has +# been executed before the `docker compose up`. +AWS_DEFAULT_REGION=us-east-1 +AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} +AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} +AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN} + +# 20251113 MCJ This is used nowhere in the codebase. +# FIXME: We should not have anything that is windows-specific. +# All development should happen in authentic, containerized environments. 
+# COMPOSE_CONVERT_WINDOWS_PATHS=1 +# Local superuser account +SUPER_USER_NAME=root +SUPER_USER_PASSWORD=blue123 +SUPER_USER_EMAIL=bluebutton@example.com +# We run migrations *always* when running locally +DB_MIGRATIONS=true +# This would be cryptographically secure in production. +DJANGO_SECRET_KEY=replace-me-with-real-secret + +# These need to be conditionally selected from +# by a launcher script. Define all of them here. +FHIR_URL_TEST="https://test.fhir.bfd.cmscloud.local" +FHIR_URL_V3_TEST="https://test.fhirv3.bfd.cmscloud.local" +FHIR_URL_SBX="https://prod-sbx.fhir.bfd.cmscloud.local" +FHIR_URL_V3_SBX="https://prod-sbx.fhirv3.bfd.cmscloud.local" + +################################################################################ +# DATABASE +# Shared DB ENV vars file for the "db" service containter. +################################################################################ +POSTGRES_DB=bluebutton +POSTGRES_PASSWORD=toor +POSTGRES_PORT=5432 + +################################################################################ +# BLUE BUTTON ("THE APP") +# Configure the local containerized app for local execution. +################################################################################ +# Top level Djano settings +DJANGO_SETTINGS_MODULE=hhs_oauth_server.settings.dev +DATABASES_CUSTOM=postgres://postgres:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB} +# 20251113 MCJ This does not seem to be used locally. +# OAUTHLIB_INSECURE_TRANSPORT=true +# 20251113 MCJ This is read via `django-getenv` in base.py +DJANGO_SECURE_SESSION=False +# 20251113 MCJ This gets defaulted to true later. Go ahead and set the value. +DJANGO_LOG_JSON_FORMAT_PRETTY=True +# 20251113 MCJ This defaults to '' later, but could also be 'YES'. +# FIXME: This should be made consistent; eg. a '1' or 'True' or 'true' or something. +# I'm no longer leaving these to be set by the environment; all variables will +# be set in this file, and I'll choose defaults I find "upstream." 
+BB2_SERVER_STD2FILE='' + +# BFD credentials/settings +# 20251113 MCJ Using values that were defaulted in launch scripts. +DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" +DJANGO_USER_ID_ITERATIONS="2" diff --git a/dev-local/Dockerfile.local b/dev-local/Dockerfile.local new file mode 100644 index 000000000..c70c7fe63 --- /dev/null +++ b/dev-local/Dockerfile.local @@ -0,0 +1,31 @@ +# FIXME: Update to most recent python. +FROM python:3.11 + +# FIXME: Uncertain implications +ENV PYTHONUNBUFFERED=1 +ENV PYDEVD_DISABLE_FILE_VALIDATION=1 +RUN apt-get update \ + && apt-get install -y \ + gettext + +# Set the local user for development +# and mount the codebase at /code +# Set this as the current/active path. +RUN useradd -m -s /bin/bash DEV +USER DEV +ADD .. /code +WORKDIR /code + +# FIXME: Is this how to use venvs +# reliably in a Dockerfile? +RUN python -m venv /tmp/venv +RUN . /tmp/venv/bin/activate +ENV PATH="/tmp/venv/bin:${PATH}" +RUN pip install --upgrade \ + pip \ + pip-tools \ + setuptools +RUN pip install -r requirements/requirements.dev.txt \ + --no-index \ + --find-links \ + ./vendor/ diff --git a/dev-local/Makefile b/dev-local/Makefile new file mode 100644 index 000000000..39074cdcb --- /dev/null +++ b/dev-local/Makefile @@ -0,0 +1,20 @@ +all: build-local run-local + +build-local: + cd msls-local ; make all ; cd .. + docker build \ + --platform "linux/amd64" \ + -t bb-local:latest \ + -f Dockerfile.local .. + +# https://stackoverflow.com/questions/2826029/passing-additional-variables-from-command-line-to-make + +run-local: +# Environments in Makefiles are strange. Once we source, we have to run from the same line, basically. +# Also, you have to outdent comments to make sure they aren't passed to the shell. 
:sigh: + @echo "Configuring for ${ENV}" ; \ + ./retrieve-certs.bash ; \ + source retrieve-salt.bash ; \ + ./check-env-postconditions.bash ; \ + ./run-appropriate-stack.bash + \ No newline at end of file diff --git a/dev-local/README.md b/dev-local/README.md new file mode 100644 index 000000000..db1a6f375 --- /dev/null +++ b/dev-local/README.md @@ -0,0 +1,121 @@ +# containerized local development + +The containerized local build should provide a local development experience that is + +1. not in the way of developing new features +1. allows developers to be confident in the code they develop and ship +1. consistent with production-like environments (as much as possible) + +These tools assume you are a developer working on the project, and have access to the VPN and other systems. + +## pre-requisites + +It is assumed you have a *NIX-like shell, and have the ability to run GNU Make or a reasonable fascimilie thereof. + +A Mac, out-of-the-box, should "just work," as well as an Intel-based Linux host. Under Windows, it is assumed that WSL2 would provide an environment that would work. + +### configuring `kion` + +You should already have a `.kion.yaml` in your home directory. If not, follow the [local desktop development](https://confluence.cms.gov/spaces/BB2/pages/484224999/Local+Desktop+Development) onboarding docs to set up Cloudtamer/`kion`. + +You need to add an alias for this tooling to work. + +``` +favorites: + - name: BB2-NON-PROD + account: + cloud_access_role: + access_type: cli +``` + +If you already have some aliases, you can just add this one to the list. The account number and cloud access role can be obtained from the Cloudtamer dashboard. The alias **must** be named `BB2-NON-PROD` for these tools to work. + +## to start + +Copy `.env.local.example` to `.env.local`. You probably don't *have to* edit anything, but you can if you want. 
+ +## setting local environment variables + +`.env.local` is pre-configured with variables that should "just work" in any local development environment. + +At the top of the file are variables that a developer may want to fiddle with. Specifically: + +* BB20_ENABLE_REMOTE_DEBUG=true +* BB20_REMOTE_DEBUG_WAIT_ATTACH=false +* RUN_ONLINE_TESTS=true + +These variables control the debugger and whether we run tests that require a live connection to BFD in `TEST` and `SBX` environments. The latter is defaulted to `true` to ensure test coverage completeness when developing locally. The local dev tools help automate the process of testing against the online `test` and `sbx` environments. + +## building local image(s) + +The first step is to build the local containers. From the root of the tree, or from within `dev-local`, run + +``` +make build-local +``` + +This should build the image `bb-local:latest`. + +Building the image is only necessary when the `requirements/requirements.txt` or `requirements/requirements.dev.txt` files change. Those requirements get baked into the image; changes to application code should be picked up via dynamic reload during normal development. + +## running the local image + +Next, run the stack. + +### running locally / mocking MSLS + +To run the stack locally, + +``` +make run ENV=local +``` + +This will launch the stack with no connection to live environments, and it will use the mocked MSLS tooling. + +### running against `test` + +When launched with + +``` +make run TARGET=test +``` + +the tooling obtains and sources credentials for running against our `test` environment. + +### running against `sbx` + +Similarly, + +``` +make run TARGET=sbx +``` + +runs against SBX. + +#### what is the difference? + +`test` and `sbx` hit different BFD servers, with different data. + +### what do these run targets do? + +In a nutshell: + +1. Run `kion f BB2-NON-PROD` to authenticate/obtain AWS credentials. +1. 
Obtain certificates for the remote environment (if you selected `test` or `sbx`) +1. `docker compose --profile mock-sls up` for `local`, `docker compose --profile slsx up` for live envs. + +## future work + +Once this is in place, it is easy to script some additional tools. For example, a menu driven script to... + +1. load synthetic test data (akin to [this](https://github.com/GSA-TTS/FAC/blob/main/backend/util/load_public_dissem_data/manage_local_data.bash)) +1. create/remove admin users +1. Test database dump/load +1. ... + +can be easily added, and leveraged in future containerization/devops work. + +## paths and variables + +This set of tools creates a new directory (`.bb2`) in the developers $HOME. This is treated as a kind of "BB2 config directory" by this local automation tooling. It uses this (new) directory for the simple reason that there are things we do not want floating around in the source tree, if we can avoid it. + diff --git a/dev-local/build-local.sh b/dev-local/build-local.sh new file mode 100755 index 000000000..b65b4ab0a --- /dev/null +++ b/dev-local/build-local.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# Exit if any command results in a non-zero exit status. +set -e + + +docker build \ + --platform "linux/amd64" \ + -t bb-local:latest \ + -f Dockerfile.local .. \ No newline at end of file diff --git a/dev-local/check-env-postconditions.bash b/dev-local/check-env-postconditions.bash new file mode 100755 index 000000000..a73a9db17 --- /dev/null +++ b/dev-local/check-env-postconditions.bash @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +./check-env-preconditions.bash + +echo "Checking postconditions." + +if [ "${ENV}" != "local" ]; then + # Check that one of our DJANGO values are populated. + if [ -z ${DJANGO_SLSX_CLIENT_ID} ]; then + echo "Failed to source salt/client values. Exiting." + exit -2 + fi +fi + +echo "All systems go." 
\ No newline at end of file diff --git a/dev-local/check-env-preconditions.bash b/dev-local/check-env-preconditions.bash new file mode 100755 index 000000000..5bebac882 --- /dev/null +++ b/dev-local/check-env-preconditions.bash @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +if [ "${ENV}" != "local" ]; then + if [ -z ${KION_ACCOUNT_ALIAS} ]; then + echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=${ENV}'." + echo "Exiting." + fi +fi + +# https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash +if [ -z ${ENV} ]; then + echo "ENV not set. Cannot retrieve certs." + echo "Exiting." + exit -1 +fi diff --git a/dev-local/docker-compose-local.yaml b/dev-local/docker-compose-local.yaml new file mode 100644 index 000000000..e4504b24a --- /dev/null +++ b/dev-local/docker-compose-local.yaml @@ -0,0 +1,72 @@ +services: + #################### + # Postgres database + # Pin to the version of Postgres that is + # equivalent to production. + db: + image: postgres:16 + env_file: + - .env.local + ports: + - "5432:5432" + #################### + # Blue Button + web: + image: bb-local:latest + command: /code/dev-local/start-local.sh + env_file: + # This may have relied on order of execution + # That is, .env was sourced last, possibly overriding other env vars. :/ + - .env.local + volumes: + - ..:/code + - ~/.bb2/certstore:/certstore + - ./start-local.sh:/start-local.sh + ports: + - "8000:8000" + - "5678:5678" + depends_on: + - db + profiles: + - slsx + platform: linux/amd64 + #################### + # Local MSLS server + # A small Flask app for testing. + msls: + image: msls-local:latest + command: python app.py + ports: + - "8080:8080" + volumes: + - ./msls-local:/code + profiles: + - mocksls + # web_msls: + # build: . 
+ # command: ./start-local.sh + # env_file: + # - .env.local + # volumes: + # - ..:/code + # - ~/.bb2/certstore:/certstore + # ports: + # - "8000:8000" + # - "5678:5678" + # depends_on: + # - db + # - msls + # profiles: + # - mocksls + # platform: linux/amd64 + # unittests: + # build: . + # command: python3 -m debugpy --listen 0.0.0.0:6789 --wait-for-client runtests.py + # env_file: + # - docker-compose/unittests-env-vars.env + # ports: + # - "6789:6789" + # volumes: + # - .:/code + # profiles: + # - tests diff --git a/dev-local/Dockerfile b/dev-local/msls-local/Dockerfile.msls similarity index 100% rename from dev-local/Dockerfile rename to dev-local/msls-local/Dockerfile.msls diff --git a/dev-local/msls-local/Makefile b/dev-local/msls-local/Makefile new file mode 100644 index 000000000..95ca1ba49 --- /dev/null +++ b/dev-local/msls-local/Makefile @@ -0,0 +1,7 @@ +all: build-local + +build-local: + cd msls-local ; docker build \ + --platform "linux/amd64" \ + -t msls-local:latest \ + -f Dockerfile.msls . 
\ No newline at end of file diff --git a/dev-local/app.py b/dev-local/msls-local/app.py similarity index 100% rename from dev-local/app.py rename to dev-local/msls-local/app.py diff --git a/dev-local/requirements.txt b/dev-local/msls-local/requirements.txt similarity index 100% rename from dev-local/requirements.txt rename to dev-local/msls-local/requirements.txt diff --git a/dev-local/templates/login.html b/dev-local/msls-local/templates/login.html similarity index 100% rename from dev-local/templates/login.html rename to dev-local/msls-local/templates/login.html diff --git a/dev-local/users.csv b/dev-local/msls-local/users.csv similarity index 100% rename from dev-local/users.csv rename to dev-local/msls-local/users.csv diff --git a/dev-local/retrieve-certs.bash b/dev-local/retrieve-certs.bash new file mode 100755 index 000000000..5bec67878 --- /dev/null +++ b/dev-local/retrieve-certs.bash @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +# It is assumed that the Makefile was invoked with +# +# ENV=test +# +# or +# +# ENV=sbx +# in order to get here. Or, you could run this directly with +# +# ENV=test ./retrieve-certs.bash +# +# or similar, for testing. + +./check-env-preconditions.bash + +# We have to grab the right secret. +# We use a suffix on a base path for that. +if [ "${ENV}" = "local" ]; then + echo "Running locally. Not retrieving certs." + exit 0 +elif [ "${ENV}" = "test" ]; then + export SUFFIX="_test" +elif [ "${ENV}" = "sbx" ]; then + export SUFFIX="" +else + echo "ENV must be set to 'test' or 'sbx'." + echo "ENV is currently set to '${ENV}'." + echo "Exiting." + exit -2 +fi + + +# We will (rudely) create a .bb2 directory in the user's homedir. +# Let's call that BB2_CONFIG_DIR +export BB2_CONFIG_DIR="${HOME}/.bb2" +mkdir -p "${BB2_CONFIG_DIR}" +# And, lets put the certs in their own subdir. +export BB2_CERTSTORE="${BB2_CONFIG_DIR}/certstore" +mkdir -p "${BB2_CERTSTORE}" + +echo "Retrieving certs for the '${ENV}' environment." 
+aws secretsmanager get-secret-value --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${SUFFIX} --query 'SecretString' --output text | base64 -d > "${BB2_CERTSTORE}/ca.cert.pem" + +if [ $? -ne 0 ]; then + echo "Failed to retrieve cert. Exiting." + exit -3 +fi + +aws secretsmanager get-secret-value --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_private_key${SUFFIX} --query 'SecretString' --output text | base64 -d > "${BB2_CERTSTORE}/ca.key.nocrypt.pem" + +if [ $? -ne 0 ]; then + echo "Failed to retrieve private key. Exiting." + exit -4 +fi + +echo "Retrieved cert and key for '${ENV}'." \ No newline at end of file diff --git a/dev-local/retrieve-salt.bash b/dev-local/retrieve-salt.bash new file mode 100755 index 000000000..9aafc1867 --- /dev/null +++ b/dev-local/retrieve-salt.bash @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# It is assumed that the Makefile was invoked with +# +# ENV=test +# +# or +# +# ENV=sbx +# in order to get here. Or, you could run this directly with +# +# ENV=test source retrieve-salt.bash +# +# or similar, for testing. + +./check-env-preconditions.bash + +if [ "${ENV}" = "local" ]; then + echo "Running locally. Not retrieving salt." + return 0 +elif [ "${ENV}" = "test" ]; then + echo "Retrieving salt/client values for '${ENV}'." +elif [ "${ENV}" = "sbx" ]; then + echo "Retrieving salt/client values for '${ENV}'." +else + echo "ENV must be set to 'test' or 'sbx'." + echo "ENV is currently set to '${ENV}'." + echo "Exiting." + exit -2 +fi + +# These seem to be the same regardless of the env (test or sbx). 
+export DJANGO_USER_ID_SALT=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_salt --query 'SecretString' --output text) +export DJANGO_USER_ID_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_iterations --query 'SecretString' --output text) +export DJANGO_SLSX_CLIENT_ID=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_id --query 'SecretString' --output text) +export DJANGO_SLSX_CLIENT_SECRET=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_secret --query 'SecretString' --output text) +export DJANGO_PASSWORD_HASH_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_password_hash_iterations --query 'SecretString' --output text) + +echo "Success." \ No newline at end of file diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash new file mode 100755 index 000000000..c0e4d7851 --- /dev/null +++ b/dev-local/run-appropriate-stack.bash @@ -0,0 +1,45 @@ +#!/usr/bin/env bash + +./check-env-preconditions.bash + +echo "Launching the stack. Woosh." + + +if [ "${ENV}" = "local" ]; then + PROFILE="mock-sls" +elif [ "${ENV}" = "test" ]; then + PROFILE="slsx" +elif [ "${ENV}" = "sbx" ]; then + PROFILE="slsx" +else + echo "ENV must be set to 'test' or 'sbx'." + echo "ENV is currently set to '${ENV}'." + echo "Exiting." + exit -2 +fi + +DOCKER_PS=$(docker ps -q) +echo $DOCKER_PS + +TAKE_IT_DOWN="NO" +for id in $DOCKER_PS; do + NAME=$(docker inspect --format '{{.Config.Image}}' $id) + if [[ "${NAME}" =~ "postgres" ]]; then + echo "I think things are still running. Bringing the stack down." + TAKE_IT_DOWN="YES" + fi +done + +if [ "${TAKE_IT_DOWN}" = "YES" ]; then + for id in $DOCKER_PS; do + echo "Stopping container $id" + docker stop $id + done +fi + +echo "Vernier start." 
+ +docker compose \ + --profile slsx \ + -f docker-compose-local.yaml \ + up \ No newline at end of file diff --git a/dev-local/start-local.sh b/dev-local/start-local.sh new file mode 100755 index 000000000..4bb92c774 --- /dev/null +++ b/dev-local/start-local.sh @@ -0,0 +1,85 @@ +#!/usr/bin/env bash + +DB_MIGRATIONS=${DB_MIGRATIONS:-true} +SUPER_USER_NAME=${SUPER_USER_NAME:-'root'} +SUPER_USER_EMAIL=${SUPER_USER_EMAIL:-'bluebutton@example.com'} +SUPER_USER_PASSWORD=${SUPER_USER_PASSWORD:-'bluebutton123'} +BB20_ENABLE_REMOTE_DEBUG=${BB20_ENABLE_REMOTE_DEBUG:-false} +BB20_REMOTE_DEBUG_WAIT_ATTACH=${BB20_REMOTE_DEBUG_WAIT_ATTACH:-false} +BB2_SERVER_STD2FILE=${BB2_SERVER_STD2FILE:-''} + +DJANGO_LOG_JSON_FORMAT_PRETTY=${DJANGO_LOG_JSON_FORMAT_PRETTY:-True} +DJANGO_USER_ID_SALT=${DJANGO_USER_ID_SALT:-"6E6F747468657265616C706570706572"} +DJANGO_USER_ID_ITERATIONS=${DJANGO_USER_ID_ITERATIONS:-"2"} + +if [ "${DJANGO_SLSX_CLIENT_SECRET}" = "xxxxx" ] +then + # for msls + echo "MSLS used for identity service..." +else + echo "SLSX used for identity service..." + DJANGO_MEDICARE_SLSX_REDIRECT_URI=${DJANGO_MEDICARE_SLSX_REDIRECT_URI:-"http://localhost:8000/mymedicare/sls-callback"} + DJANGO_MEDICARE_SLSX_LOGIN_URI=${DJANGO_MEDICARE_SLSX_LOGIN_URI:-"https://test.medicare.gov/sso/authorize?client_id=bb2api"} + DJANGO_SLSX_HEALTH_CHECK_ENDPOINT=${DJANGO_SLSX_HEALTH_CHECK_ENDPOINT:-"https://test.accounts.cms.gov/health"} + DJANGO_SLSX_TOKEN_ENDPOINT=${DJANGO_SLSX_TOKEN_ENDPOINT:-"https://test.medicare.gov/sso/session"} + DJANGO_SLSX_SIGNOUT_ENDPOINT=${DJANGO_SLSX_SIGNOUT_ENDPOINT:-"https://test.medicare.gov/sso/signout"} + DJANGO_SLSX_USERINFO_ENDPOINT=${DJANGO_SLSX_USERINFO_ENDPOINT:-"https://test.accounts.cms.gov/v1/users"} +fi + +if [ "${DB_MIGRATIONS}" = true ] +then + echo "run db image migration and models initialization." 
+ python manage.py migrate + + echo "from django.contrib.auth.models import User; User.objects.create_superuser('${SUPER_USER_NAME}', '${SUPER_USER_EMAIL}', '${SUPER_USER_PASSWORD}')" | python manage.py shell + python manage.py create_admin_groups + python manage.py loaddata scopes.json + python manage.py create_blue_button_scopes + python manage.py create_test_user_and_application + python manage.py create_user_identification_label_selection + echo "creating feature switches......" + python manage.py create_test_feature_switches +else + echo "restarting blue button server, no db image migration and models initialization will run here, you might need to manually run DB image migrations." +fi + +if [ ! -d 'bluebutton-css' ] +then + git clone https://github.com/CMSgov/bluebutton-css.git +else + echo 'CSS already installed.' +fi + +if [ "${BB20_ENABLE_REMOTE_DEBUG}" = true ] +then + if [ "${BB20_REMOTE_DEBUG_WAIT_ATTACH}" = true ] + then + if [ "${BB2_SERVER_STD2FILE}" = "YES" ] + then + echo "Start bluebutton server with remote debugging and wait attach..., std redirect to file: ${BB2_SERVER_STD2FILE}" + python3 -m debugpy --listen 0.0.0.0:5678 --wait-for-client manage.py runserver 0.0.0.0:8000 > ./docker-compose/tmp/bb2_email_to_stdout.log 2>&1 + else + echo "Start bluebutton server with remote debugging and wait attach..." + # NOTE: The "--noreload" option can be added below to disable if needed + python3 -m debugpy --listen 0.0.0.0:5678 --wait-for-client manage.py runserver 0.0.0.0:8000 + fi + else + if [ "${BB2_SERVER_STD2FILE}" = "YES" ] + then + echo "Start bluebutton server with remote debugging..., std redirect to file: ${BB2_SERVER_STD2FILE}" + python3 -m debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 > ./docker-compose/tmp/bb2_email_to_stdout.log 2>&1 + else + echo "Start bluebutton server with remote debugging..." 
+ python3 -m debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 + fi + fi +else + if [ "${BB2_SERVER_STD2FILE}" = "YES" ] + then + echo "Start bluebutton server ..., std redirect to file: ${BB2_SERVER_STD2FILE}" + python3 manage.py runserver 0.0.0.0:8000 > ./docker-compose/tmp/bb2_email_to_stdout.log 2>&1 + else + echo "Start bluebutton server ..." + python3 manage.py runserver 0.0.0.0:8000 + fi +fi From 8e988b4a1ab79c706d92557f45cceac69c331fb0 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Mon, 17 Nov 2025 10:00:57 -0500 Subject: [PATCH 02/14] Updating; looks like it builds/runs. --- dev-local/{.env.local => .env.local.example} | 16 +++++----------- dev-local/.gitignore | 1 + dev-local/Makefile | 7 +++++-- dev-local/README.md | 15 ++++++++++----- dev-local/check-env-preconditions.bash | 5 ++++- dev-local/run-appropriate-stack.bash | 15 ++++++++++----- .../msls-local => msls-local}/Dockerfile.msls | 0 {dev-local/msls-local => msls-local}/Makefile | 0 {dev-local/msls-local => msls-local}/app.py | 0 .../msls-local => msls-local}/requirements.txt | 0 .../templates/login.html | 0 {dev-local/msls-local => msls-local}/users.csv | 0 12 files changed, 35 insertions(+), 24 deletions(-) rename dev-local/{.env.local => .env.local.example} (80%) create mode 100644 dev-local/.gitignore rename {dev-local/msls-local => msls-local}/Dockerfile.msls (100%) rename {dev-local/msls-local => msls-local}/Makefile (100%) rename {dev-local/msls-local => msls-local}/app.py (100%) rename {dev-local/msls-local => msls-local}/requirements.txt (100%) rename {dev-local/msls-local => msls-local}/templates/login.html (100%) rename {dev-local/msls-local => msls-local}/users.csv (100%) diff --git a/dev-local/.env.local b/dev-local/.env.local.example similarity index 80% rename from dev-local/.env.local rename to dev-local/.env.local.example index 648797bcc..5b17cb3fe 100644 --- a/dev-local/.env.local +++ b/dev-local/.env.local.example @@ -24,10 +24,6 @@ 
AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN} -# 20251113 MCJ This is used nowhere in the codebase. -# FIXME: We should not have anything that is windows-specific. -# All development should happen in authentic, containerized environments. -# COMPOSE_CONVERT_WINDOWS_PATHS=1 # Local superuser account SUPER_USER_NAME=root SUPER_USER_PASSWORD=blue123 @@ -59,16 +55,14 @@ POSTGRES_PORT=5432 # Top level Djano settings DJANGO_SETTINGS_MODULE=hhs_oauth_server.settings.dev DATABASES_CUSTOM=postgres://postgres:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB} -# 20251113 MCJ This does not seem to be used locally. -# OAUTHLIB_INSECURE_TRANSPORT=true -# 20251113 MCJ This is read via `django-getenv` in base.py +# We need this so that our local `http://localhost:8000/` URLs work in the test client/local stack. +OAUTHLIB_INSECURE_TRANSPORT=true +# This is read via `django-getenv` in base.py DJANGO_SECURE_SESSION=False -# 20251113 MCJ This gets defaulted to true later. Go ahead and set the value. +# This gets defaulted to true later. Go ahead and set the value. DJANGO_LOG_JSON_FORMAT_PRETTY=True # 20251113 MCJ This defaults to '' later, but could also be 'YES'. -# FIXME: This should be made consistent; eg. a '1' or 'True' or 'true' or something. -# I'm no longer leaving these to be set by the environment; all variables will -# be set in this file, and I'll choose defaults I find "upstream." +# This outputs logs to a file locally. BB2_SERVER_STD2FILE='' # BFD credentials/settings diff --git a/dev-local/.gitignore b/dev-local/.gitignore new file mode 100644 index 000000000..11ee75815 --- /dev/null +++ b/dev-local/.gitignore @@ -0,0 +1 @@ +.env.local diff --git a/dev-local/Makefile b/dev-local/Makefile index 39074cdcb..39eb64fdf 100644 --- a/dev-local/Makefile +++ b/dev-local/Makefile @@ -1,11 +1,13 @@ all: build-local run-local build-local: - cd msls-local ; make all ; cd .. 
+ cd ../msls-local ; make all ; \ + cd ../dev-local ; \ docker build \ --platform "linux/amd64" \ -t bb-local:latest \ - -f Dockerfile.local .. + -f Dockerfile.local .. ; \ + cd .. # https://stackoverflow.com/questions/2826029/passing-additional-variables-from-command-line-to-make @@ -13,6 +15,7 @@ run-local: # Environments in Makefiles are strange. Once we source, we have to run from the same line, basically. # Also, you have to outdent comments to make sure they aren't passed to the shell. :sigh: @echo "Configuring for ${ENV}" ; \ + ./check-env-preconditions.bash ; \ ./retrieve-certs.bash ; \ source retrieve-salt.bash ; \ ./check-env-postconditions.bash ; \ diff --git a/dev-local/README.md b/dev-local/README.md index db1a6f375..27c1b7500 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -67,7 +67,7 @@ Next, run the stack. To run the stack locally, ``` -make run ENV=local +make run-local ENV=local ``` This will launch the stack with no connection to live environments, and it will use the mocked MSLS tooling. @@ -77,7 +77,7 @@ This will launch the stack with no connection to live environments, and it will When launched with ``` -make run TARGET=test +make run-local TARGET=test ``` the tooling obtains and sources credentials for running against our `test` environment. @@ -87,7 +87,7 @@ the tooling obtains and sources credentials for running against our `test` envir Similarly, ``` -make run TARGET=sbx +make run-local TARGET=sbx ``` runs against SBX. @@ -106,16 +106,21 @@ In a nutshell: ## future work -Once this is in place, it is easy to script some additional tools. For example, a menu driven script to... +Once this is in place, it is easy to script/add Makefile targets for some additional tools. For example, we could have a menu driven script to... 1. load synthetic test data (akin to [this](https://github.com/GSA-TTS/FAC/blob/main/backend/util/load_public_dissem_data/manage_local_data.bash)) 1. create/remove admin users 1. Test database dump/load +1. 
Run Selenium tests 1. ... can be easily added, and leveraged in future containerization/devops work. ## paths and variables -This set of tools creates a new directory (`.bb2`) in the developers $HOME. This is treated as a kind of "BB2 config directory" by this local automation tooling. It uses this (new) directory for the simple reason that there are things we do not want floating around in the source tree, if we can avoid it. +This set of tools creates a new directory (`.bb2`) in the developers $HOME. This is treated as a kind of "BB2 config directory" by this local automation tooling. It uses this (new) directory for the simple reason that there are things we do not want floating around in the source tree, if we can avoid it. Specifically, we do not want to download the certs for `test` and `sbx` into the git tree. + +## notes + + diff --git a/dev-local/check-env-preconditions.bash b/dev-local/check-env-preconditions.bash index 5bebac882..4b794e201 100755 --- a/dev-local/check-env-preconditions.bash +++ b/dev-local/check-env-preconditions.bash @@ -2,7 +2,7 @@ if [ "${ENV}" != "local" ]; then if [ -z ${KION_ACCOUNT_ALIAS} ]; then - echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=${ENV}'." + echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=test'." echo "Exiting." fi fi @@ -10,6 +10,9 @@ fi # https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash if [ -z ${ENV} ]; then echo "ENV not set. Cannot retrieve certs." + echo "ENV must be one of 'local', 'test', or 'sbx'." + echo "For example:" + echo " make run-local ENV=test" echo "Exiting." exit -1 fi diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index c0e4d7851..694285759 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -2,17 +2,21 @@ ./check-env-preconditions.bash -echo "Launching the stack. Woosh." +echo "🚀 Launching the stack." 
if [ "${ENV}" = "local" ]; then PROFILE="mock-sls" elif [ "${ENV}" = "test" ]; then PROFILE="slsx" + export FHIR_URL="${FHIR_URL_TEST}" + export FHIR_URL_V3="${FHIR_URL_V3_TEST}" elif [ "${ENV}" = "sbx" ]; then PROFILE="slsx" + export FHIR_URL="${FHIR_URL_SBX}" + export FHIR_URL_V3="${FHIR_URL_V3_SBX}" else - echo "ENV must be set to 'test' or 'sbx'." + echo "ENV must be set to 'local', 'test', or 'sbx'." echo "ENV is currently set to '${ENV}'." echo "Exiting." exit -2 @@ -25,19 +29,20 @@ TAKE_IT_DOWN="NO" for id in $DOCKER_PS; do NAME=$(docker inspect --format '{{.Config.Image}}' $id) if [[ "${NAME}" =~ "postgres" ]]; then - echo "I think things are still running. Bringing the stack down." + echo "🤔 I think things are still running. Bringing the stack down." TAKE_IT_DOWN="YES" fi done if [ "${TAKE_IT_DOWN}" = "YES" ]; then for id in $DOCKER_PS; do - echo "Stopping container $id" + echo "🛑 Stopping container $id" docker stop $id done fi -echo "Vernier start." +echo "📊 Vernier start." +echo docker compose \ --profile slsx \ diff --git a/dev-local/msls-local/Dockerfile.msls b/msls-local/Dockerfile.msls similarity index 100% rename from dev-local/msls-local/Dockerfile.msls rename to msls-local/Dockerfile.msls diff --git a/dev-local/msls-local/Makefile b/msls-local/Makefile similarity index 100% rename from dev-local/msls-local/Makefile rename to msls-local/Makefile diff --git a/dev-local/msls-local/app.py b/msls-local/app.py similarity index 100% rename from dev-local/msls-local/app.py rename to msls-local/app.py diff --git a/dev-local/msls-local/requirements.txt b/msls-local/requirements.txt similarity index 100% rename from dev-local/msls-local/requirements.txt rename to msls-local/requirements.txt diff --git a/dev-local/msls-local/templates/login.html b/msls-local/templates/login.html similarity index 100% rename from dev-local/msls-local/templates/login.html rename to msls-local/templates/login.html diff --git a/dev-local/msls-local/users.csv 
b/msls-local/users.csv similarity index 100% rename from dev-local/msls-local/users.csv rename to msls-local/users.csv From fad3d72055bbd63f0f7aef6a06a71c75e9c773e4 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Mon, 17 Nov 2025 16:18:19 -0500 Subject: [PATCH 03/14] Stack talks to BFD This worked for test. There is now more cleanup. There's a notional split between "setting up to run" and "getting the variables into the container." This moves all the work out of the container. It could, perhaps, all be moved into the container if the correct env was passed for secrets gathering? --- dev-local/.env.container | 87 ++++++++++++ dev-local/Makefile | 11 +- dev-local/README.md | 1 - dev-local/check-env-postconditions.bash | 15 -- dev-local/check-env-preconditions.bash | 18 --- dev-local/check-pre-post.bash | 58 ++++++++ dev-local/docker-compose-local.yaml | 33 ++--- dev-local/retrieve-certs-and-salt.bash | 176 ++++++++++++++++++++++++ dev-local/retrieve-certs.bash | 58 -------- dev-local/retrieve-salt.bash | 39 ------ dev-local/run-appropriate-stack.bash | 61 +++++--- dev-local/start-local.sh | 28 +--- 12 files changed, 387 insertions(+), 198 deletions(-) create mode 100644 dev-local/.env.container delete mode 100755 dev-local/check-env-postconditions.bash delete mode 100755 dev-local/check-env-preconditions.bash create mode 100644 dev-local/check-pre-post.bash create mode 100755 dev-local/retrieve-certs-and-salt.bash delete mode 100755 dev-local/retrieve-certs.bash delete mode 100755 dev-local/retrieve-salt.bash diff --git a/dev-local/.env.container b/dev-local/.env.container new file mode 100644 index 000000000..eab10c3de --- /dev/null +++ b/dev-local/.env.container @@ -0,0 +1,87 @@ +################################################################################ +# ABOUT THESE VALUES +# These values get sourced into `run-appropriate-stack` to do some work +# pre-launch, and then they are sourced into `start-local` inside the container +# to do *more* work + 
+################################################################################ +# DEVELOPER VARIABLES +# You may want to tweak these any given day of the week. + +## enable debugpy remote debugging (on port 5678) +# 20251113 MCJ This clearly works, but it does not seem to be used anywhere +# that would actually affect application behavior. :confused: +BB20_ENABLE_REMOTE_DEBUG="${BB20_ENABLE_REMOTE_DEBUG}" +BB20_REMOTE_DEBUG_WAIT_ATTACH="${BB20_REMOTE_DEBUG_WAIT_ATTACH}" +# Setting this to `false` will disable tests that run against the live BFD server. +# To run them locally, you need to be authenticated and on the VPN. +RUN_ONLINE_TESTS="${RUN_ONLINE_TESTS}" +# You probably shouldn't touch these. +DJANGO_FHIR_CERTSTORE="${DJANGO_FHIR_CERTSTORE}" + +################################################################################ +# AWS CREDENTIALS +# These values must be loaded into the local environment at the time the +# stack is started. That implies (perhaps) a `kion s` or similar has +# been executed before the `docker compose up`. +AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" +AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}" +AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" +AWS_SESSION_TOKEN="${AWS_SESSION_TOKEN}" + +# Local superuser account +SUPER_USER_NAME="${SUPER_USER_NAME}" +SUPER_USER_PASSWORD="${SUPER_USER_PASSWORD}" +SUPER_USER_EMAIL="${SUPER_USER_EMAIL}" +# We run migrations *always* when running locally +DB_MIGRATIONS="${DB_MIGRATIONS}" +# This would be cryptographically secure in production. +DJANGO_SECRET_KEY="${DJANGO_SECRET_KEY}" + +# These need to be conditionally selected from +# by a launcher script. Define all of them here. +FHIR_URL="${FHIR_URL}" +FHIR_URL_V3="${FHIR_URL_V3}" + +################################################################################ +# DATABASE +# Shared DB ENV vars file for the "db" service containter. 
+################################################################################ +POSTGRES_DB="${POSTGRES_DB}" +POSTGRES_PASSWORD="${POSTGRES_PASSWORD}" +POSTGRES_PORT="${POSTGRES_PORT}" + +################################################################################ +# BLUE BUTTON ("THE APP") +# Configure the local containerized app for local execution. +################################################################################ +# Top level Djano settings +DJANGO_SETTINGS_MODULE="${DJANGO_SETTINGS_MODULE}" +DATABASES_CUSTOM="${DATABASES_CUSTOM}" +# We need this so that our local `http://localhost:8000/` URLs work in the test client/local stack. +OAUTHLIB_INSECURE_TRANSPORT="${OAUTHLIB_INSECURE_TRANSPORT}" +# This is read via `django-getenv` in base.py +DJANGO_SECURE_SESSION="${DJANGO_SECURE_SESSION}" +# This gets defaulted to true later. Go ahead and set the value. +DJANGO_LOG_JSON_FORMAT_PRETTY="${DJANGO_LOG_JSON_FORMAT_PRETTY}" +# 20251113 MCJ This defaults to '' later, but could also be 'YES'. +# This outputs logs to a file locally. +BB2_SERVER_STD2FILE="${BB2_SERVER_STD2FILE}" + +# BFD credentials/settings +# 20251113 MCJ Using values that were defaulted in launch scripts. 
+# DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" +# DJANGO_USER_ID_ITERATIONS="2" + +DJANGO_MEDICARE_SLSX_LOGIN_URI="${DJANGO_MEDICARE_SLSX_LOGIN_URI}" +DJANGO_MEDICARE_SLSX_REDIRECT_URI="${DJANGO_MEDICARE_SLSX_REDIRECT_URI}" +DJANGO_PASSWORD_HASH_ITERATIONS="${DJANGO_PASSWORD_HASH_ITERATIONS}" +DJANGO_SLSX_CLIENT_ID="${DJANGO_SLSX_CLIENT_ID}" +DJANGO_SLSX_CLIENT_SECRET="${DJANGO_SLSX_CLIENT_SECRET}" +DJANGO_SLSX_HEALTH_CHECK_ENDPOINT="${DJANGO_SLSX_HEALTH_CHECK_ENDPOINT}" +DJANGO_SLSX_SIGNOUT_ENDPOINT="${DJANGO_SLSX_SIGNOUT_ENDPOINT}" +DJANGO_SLSX_TOKEN_ENDPOINT="${DJANGO_SLSX_TOKEN_ENDPOINT}" +DJANGO_SLSX_USERINFO_ENDPOINT="${DJANGO_SLSX_USERINFO_ENDPOINT}" +DJANGO_SLSX_VERIFY_SSL_EXTERNAL="${DJANGO_SLSX_VERIFY_SSL_EXTERNAL}" +DJANGO_USER_ID_ITERATIONS="${DJANGO_USER_ID_ITERATIONS}" +DJANGO_USER_ID_SALT="${DJANGO_USER_ID_SALT}" diff --git a/dev-local/Makefile b/dev-local/Makefile index 39eb64fdf..c03f2a8a0 100644 --- a/dev-local/Makefile +++ b/dev-local/Makefile @@ -11,13 +11,16 @@ build-local: # https://stackoverflow.com/questions/2826029/passing-additional-variables-from-command-line-to-make +# ./check-env-preconditions.bash ; \ +# source ./.env.local ; \ +# ./retrieve-certs.bash ; \ +# source retrieve-salt.bash ; \ +# ./check-env-postconditions.bash ; \ + + run-local: # Environments in Makefiles are strange. Once we source, we have to run from the same line, basically. # Also, you have to outdent comments to make sure they aren't passed to the shell. :sigh: @echo "Configuring for ${ENV}" ; \ - ./check-env-preconditions.bash ; \ - ./retrieve-certs.bash ; \ - source retrieve-salt.bash ; \ - ./check-env-postconditions.bash ; \ ./run-appropriate-stack.bash \ No newline at end of file diff --git a/dev-local/README.md b/dev-local/README.md index 27c1b7500..176fa29e0 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -120,7 +120,6 @@ can be easily added, and leveraged in future containerization/devops work. 
This set of tools creates a new directory (`.bb2`) in the developers $HOME. This is treated as a kind of "BB2 config directory" by this local automation tooling. It uses this (new) directory for the simple reason that there are things we do not want floating around in the source tree, if we can avoid it. Specifically, we do not want to download the certs for `test` and `sbx` into the git tree. -## notes diff --git a/dev-local/check-env-postconditions.bash b/dev-local/check-env-postconditions.bash deleted file mode 100755 index a73a9db17..000000000 --- a/dev-local/check-env-postconditions.bash +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -./check-env-preconditions.bash - -echo "Checking postconditions." - -if [ "${ENV}" != "local" ]; then - # Check that one of our DJANGO values are populated. - if [ -z ${DJANGO_SLSX_CLIENT_ID} ]; then - echo "Failed to source salt/client values. Exiting." - exit -2 - fi -fi - -echo "All systems go." \ No newline at end of file diff --git a/dev-local/check-env-preconditions.bash b/dev-local/check-env-preconditions.bash deleted file mode 100755 index 4b794e201..000000000 --- a/dev-local/check-env-preconditions.bash +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -if [ "${ENV}" != "local" ]; then - if [ -z ${KION_ACCOUNT_ALIAS} ]; then - echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=test'." - echo "Exiting." - fi -fi - -# https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash -if [ -z ${ENV} ]; then - echo "ENV not set. Cannot retrieve certs." - echo "ENV must be one of 'local', 'test', or 'sbx'." - echo "For example:" - echo " make run-local ENV=test" - echo "Exiting." - exit -1 -fi diff --git a/dev-local/check-pre-post.bash b/dev-local/check-pre-post.bash new file mode 100644 index 000000000..16c603d06 --- /dev/null +++ b/dev-local/check-pre-post.bash @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +check_valid_env () { + if [[ "${ENV}" == "local" ]]; then + # This is a no-op. 
+ : + ##### + # TEST + elif [[ "${ENV}" == "test" ]]; then + : + ##### + # SBX + elif [[ "${ENV}" == "sbx" ]]; then + : + ##### + # ERR + else + echo "ENV must be set to 'local', 'test', or 'sbx'." + echo "ENV is currently set to '${ENV}'." + echo "Exiting." + exit -2 + fi + +} + +check_env_preconditions () { + if [ "${ENV}" != "local" ]; then + if [ -z ${KION_ACCOUNT_ALIAS} ]; then + echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=test'." + echo "Exiting." + return -1 + fi + fi + + # https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash + if [ -z ${ENV} ]; then + echo "ENV not set. Cannot retrieve certs." + echo "ENV must be one of 'local', 'test', or 'sbx'." + echo "For example:" + echo " make run-local ENV=test" + echo "Exiting." + return -1 + fi + + if [ -z ${OAUTHLIB_INSECURE_TRANSPORT} ]; then + echo "We need insecure transport when running locally." + echo "OAUTHLIB_INSECURE_TRANSPORT was not set to true." + echo "Exiting." + return -1 + fi + + if [ -z ${DB_MIGRATIONS} ]; then + echo "There should be a DB_MIGRATIONS flag." + echo "Exiting." + return -1 + fi +} + diff --git a/dev-local/docker-compose-local.yaml b/dev-local/docker-compose-local.yaml index e4504b24a..d8c3aad9b 100644 --- a/dev-local/docker-compose-local.yaml +++ b/dev-local/docker-compose-local.yaml @@ -10,14 +10,25 @@ services: ports: - "5432:5432" #################### + # Local MSLS server + # A small Flask app for testing. + msls: + image: msls-local:latest + command: python app.py + ports: + - "8080:8080" + volumes: + - ../msls-local:/code + profiles: + - mocksls + - slsx + #################### # Blue Button web: image: bb-local:latest - command: /code/dev-local/start-local.sh + command: /start-local.sh env_file: - # This may have relied on order of execution - # That is, .env was sourced last, possibly overriding other env vars. 
:/ - - .env.local + - .env.container volumes: - ..:/code - ~/.bb2/certstore:/certstore @@ -27,21 +38,11 @@ services: - "5678:5678" depends_on: - db + - msls profiles: - slsx platform: linux/amd64 - #################### - # Local MSLS server - # A small Flask app for testing. - msls: - image: msls-local:latest - command: python app.py - ports: - - "8080:8080" - volumes: - - ./msls-local:/code - profiles: - - mocksls + # web_msls: # build: . # command: ./start-local.sh diff --git a/dev-local/retrieve-certs-and-salt.bash b/dev-local/retrieve-certs-and-salt.bash new file mode 100755 index 000000000..e9307fead --- /dev/null +++ b/dev-local/retrieve-certs-and-salt.bash @@ -0,0 +1,176 @@ +#!/usr/bin/env bash + +# This variable determines if we're going to fetch +# cert/salt values from the secret manager. +# We assume yes, but set it to `no` when running fully locally. +export CERT_AND_SALT="YES" + +set_fhir_urls () { + echo "✅ set_fhir_urls" + + if [[ "${ENV}" == "local" ]]; then + echo "🆗 No FHIR URLs set for local testing." + ##### + # TEST + elif [[ "${ENV}" == "test" ]]; then + export FHIR_URL="${FHIR_URL_TEST}" + export FHIR_URL_V3="${FHIR_URL_V3_TEST}" + + ##### + # SBX + elif [[ "${ENV}" == "sbx" ]]; then + export FHIR_URL="${FHIR_URL_SBX}" + export FHIR_URL_V3="${FHIR_URL_V3_SBX}" + fi +} + +set_profile () { + echo "✅ set_profile" + + if [[ "${ENV}" == "local" ]]; then + export PROFILE="mock-sls" + ##### + # TEST + elif [[ "${ENV}" == "test" ]]; then + export PROFILE="slsx" + ##### + # SBX + elif [[ "${ENV}" == "sbx" ]]; then + export PROFILE="slsx" + fi +} + +retrieve_certs () { + echo "✅ retrieve_certs" + + if [[ "${ENV}" == "local" ]]; then + echo "🆗 Running locally. Not retrieving certs." + echo "🆗 Running locally. Not retrieving salt." 
+ CERT_AND_SALT="NO" + export CERT_SUFFIX="" + export DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" + export DJANGO_USER_ID_ITERATIONS="2" + ##### + # TEST + elif [[ "${ENV}" == "test" ]]; then + export CERT_SUFFIX="_test" + export PROFILE="slsx" + ##### + # SBX + elif [[ "${ENV}" == "sbx" ]]; then + export CERT_SUFFIX="" + export PROFILE="slsx" + fi + + if [[ "${CERT_AND_SALT}" == "YES" ]]; then + # We will (rudely) create a .bb2 directory in the user's homedir. + # Let's call that BB2_CONFIG_DIR + export BB2_CONFIG_DIR="${HOME}/.bb2" + mkdir -p "${BB2_CONFIG_DIR}" + # And, let's put the certs in their own subdir. + export BB2_CERTSTORE="${BB2_CONFIG_DIR}/certstore" + mkdir -p "${BB2_CERTSTORE}" + + CERT="ca.cert.pem" + KEY="ca.key.nocrypt.pem" + + # Remove them first + rm -f "${BB2_CERTSTORE}/$CERT" + rm -f "${BB2_CERTSTORE}/$KEY" + + echo "🎁 Retrieving certs for the '${ENV}' environment with suffix '${CERT_SUFFIX}'." + aws secretsmanager get-secret-value \ + --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${CERT_SUFFIX} \ + --query 'SecretString' \ + --output text | base64 -d > "${BB2_CERTSTORE}/ca.cert.pem" + + if [ $? -ne 0 ]; then + echo "⛔ Failed to retrieve cert. Exiting." + return -3 + fi + + aws secretsmanager get-secret-value \ + --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_private_key${CERT_SUFFIX} \ + --query 'SecretString' \ + --output text | base64 -d > "${BB2_CERTSTORE}/ca.key.nocrypt.pem" + + if [ $? -ne 0 ]; then + echo "⛔ Failed to retrieve private key. Exiting." + return -4 + fi + + # Check they really came down. + declare -a cert_files=($CERT $KEY) + for FILE in "${cert_files[@]}"; + do + if [ -e "${BB2_CERTSTORE}/${FILE}" ]; then + echo " 🆗 '$FILE' exists." + else + echo " ⛔ '$FILE' does not exist."
+ return -5 + fi + done + + chmod 600 "${BB2_CERTSTORE}/ca.cert.pem" + chmod 600 "${BB2_CERTSTORE}/ca.key.nocrypt.pem" + + echo "🆗 Retrieved cert and key for '${ENV}'." + fi +} + +set_salt () { + echo "✅ set_salt" + + if [ "${ENV}" = "local" ]; then + echo "🆗 Running locally. Not retrieving salt." + export DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" + export DJANGO_USER_ID_ITERATIONS="2" + DJANGO_MEDICARE_SLSX_REDIRECT_URI="http://localhost:8000/mymedicare/sls-callback" + DJANGO_MEDICARE_SLSX_LOGIN_URI="http://localhost:8080/sso/authorize?client_id=bb2api" + DJANGO_SLSX_HEALTH_CHECK_ENDPOINT="http://msls:8080/health" + DJANGO_SLSX_TOKEN_ENDPOINT="http://msls:8080/sso/session" + DJANGO_SLSX_SIGNOUT_ENDPOINT="http://msls:8080/sso/signout" + DJANGO_SLSX_USERINFO_ENDPOINT="http://msls:8080/v1/users" + + DJANGO_SLSX_CLIENT_ID=bb2api + DJANGO_SLSX_CLIENT_SECRET="xxxxx" + + DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" + + return 0 + elif [ "${ENV}" = "test" ]; then + echo "🆗 Retrieving salt/client values for '${ENV}'." + elif [ "${ENV}" = "sbx" ]; then + echo "🆗 Retrieving salt/client values for '${ENV}'." + else + echo "⛔ ENV must be set to 'test' or 'sbx'." + echo " ENV is currently set to '${ENV}'." + echo " Exiting." + return -2 + fi + + # These seem to be the same regardless of the env (test or sbx). 
+ export DJANGO_USER_ID_SALT=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_salt --query 'SecretString' --output text) + export DJANGO_USER_ID_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_iterations --query 'SecretString' --output text) + export DJANGO_SLSX_CLIENT_ID=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_id --query 'SecretString' --output text) + export DJANGO_SLSX_CLIENT_SECRET=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_secret --query 'SecretString' --output text) + export DJANGO_PASSWORD_HASH_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_password_hash_iterations --query 'SecretString' --output text) + + echo "Setting SLSX endpoint/redirects..." + export DJANGO_MEDICARE_SLSX_REDIRECT_URI="http://localhost:8000/mymedicare/sls-callback" + export DJANGO_MEDICARE_SLSX_LOGIN_URI="https://test.medicare.gov/sso/authorize?client_id=bb2api" + export DJANGO_SLSX_HEALTH_CHECK_ENDPOINT="https://test.accounts.cms.gov/health" + export DJANGO_SLSX_TOKEN_ENDPOINT="https://test.medicare.gov/sso/session" + export DJANGO_SLSX_SIGNOUT_ENDPOINT="https://test.medicare.gov/sso/signout" + export DJANGO_SLSX_USERINFO_ENDPOINT="https://test.accounts.cms.gov/v1/users" + + # SLSx credentials + export DJANGO_SLSX_CLIENT_ID=bb2api + export DJANGO_SLSX_CLIENT_SECRET=${DJANGO_SLSX_CLIENT_SECRET} + + # SSL verify for internal endpoints can't currently use SSL verification (this may change in the future) + export DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" + # export DJANGO_SLSX_VERIFY_SSL_EXTERNAL="True" + + echo "🆗 Retrieved salt values." 
+} \ No newline at end of file diff --git a/dev-local/retrieve-certs.bash b/dev-local/retrieve-certs.bash deleted file mode 100755 index 5bec67878..000000000 --- a/dev-local/retrieve-certs.bash +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env bash - -# It is assumed that the Makefile was invoked with -# -# ENV=test -# -# or -# -# ENV=sbx -# in order to get here. Or, you could run this directly with -# -# ENV=test ./retrieve-certs.bash -# -# or similar, for testing. - -./check-env-preconditions.bash - -# We have to grab the right secret. -# We use a suffix on a base path for that. -if [ "${ENV}" = "local" ]; then - echo "Running locally. Not retrieving certs." - exit 0 -elif [ "${ENV}" = "test" ]; then - export SUFFIX="_test" -elif [ "${ENV}" = "sbx" ]; then - export SUFFIX="" -else - echo "ENV must be set to 'test' or 'sbx'." - echo "ENV is currently set to '${ENV}'." - echo "Exiting." - exit -2 -fi - - -# We will (rudely) create a .bb2 directory in the user's homedir. -# Let's call that BB2_CONFIG_DIR -export BB2_CONFIG_DIR="${HOME}/.bb2" -mkdir -p "${BB2_CONFIG_DIR}" -# And, lets put the certs in their own subdir. -export BB2_CERTSTORE="${BB2_CONFIG_DIR}/certstore" -mkdir -p "${BB2_CERTSTORE}" - -echo "Retrieving certs for the '${ENV}' environment." -aws secretsmanager get-secret-value --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${SUFFIX} --query 'SecretString' --output text | base64 -d > "${BB2_CERTSTORE}/ca.cert.pem" - -if [ $? -ne 0 ]; then - echo "Failed to retrieve cert. Exiting." - exit -3 -fi - -aws secretsmanager get-secret-value --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_private_key${SUFFIX} --query 'SecretString' --output text | base64 -d > "${BB2_CERTSTORE}/ca.key.nocrypt.pem" - -if [ $? -ne 0 ]; then - echo "Failed to retrieve private key. Exiting." - exit -4 -fi - -echo "Retrieved cert and key for '${ENV}'." 
\ No newline at end of file diff --git a/dev-local/retrieve-salt.bash b/dev-local/retrieve-salt.bash deleted file mode 100755 index 9aafc1867..000000000 --- a/dev-local/retrieve-salt.bash +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -# It is assumed that the Makefile was invoked with -# -# ENV=test -# -# or -# -# ENV=sbx -# in order to get here. Or, you could run this directly with -# -# ENV=test source retrieve-salt.bash -# -# or similar, for testing. - -./check-env-preconditions.bash - -if [ "${ENV}" = "local" ]; then - echo "Running locally. Not retrieving salt." - return 0 -elif [ "${ENV}" = "test" ]; then - echo "Retrieving salt/client values for '${ENV}'." -elif [ "${ENV}" = "sbx" ]; then - echo "Retrieving salt/client values for '${ENV}'." -else - echo "ENV must be set to 'test' or 'sbx'." - echo "ENV is currently set to '${ENV}'." - echo "Exiting." - exit -2 -fi - -# These seem to be the same regardless of the env (test or sbx). -export DJANGO_USER_ID_SALT=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_salt --query 'SecretString' --output text) -export DJANGO_USER_ID_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_user_id_iterations --query 'SecretString' --output text) -export DJANGO_SLSX_CLIENT_ID=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_id --query 'SecretString' --output text) -export DJANGO_SLSX_CLIENT_SECRET=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/slsx_client_secret --query 'SecretString' --output text) -export DJANGO_PASSWORD_HASH_ITERATIONS=$(aws secretsmanager get-secret-value --secret-id /bb2/test/app/django_password_hash_iterations --query 'SecretString' --output text) - -echo "Success." 
\ No newline at end of file diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index 694285759..d2348a227 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -1,26 +1,42 @@ #!/usr/bin/env bash +source ./check-pre-post.bash +source ./retrieve-certs-and-salt.bash -./check-env-preconditions.bash - -echo "🚀 Launching the stack." - - -if [ "${ENV}" = "local" ]; then - PROFILE="mock-sls" -elif [ "${ENV}" = "test" ]; then - PROFILE="slsx" - export FHIR_URL="${FHIR_URL_TEST}" - export FHIR_URL_V3="${FHIR_URL_V3_TEST}" -elif [ "${ENV}" = "sbx" ]; then - PROFILE="slsx" - export FHIR_URL="${FHIR_URL_SBX}" - export FHIR_URL_V3="${FHIR_URL_V3_SBX}" -else - echo "ENV must be set to 'local', 'test', or 'sbx'." - echo "ENV is currently set to '${ENV}'." - echo "Exiting." - exit -2 -fi +# this says to "export all variables." +set -a +# exit on error. +set -e + +###################################################################### +# let's make sure we have a valid ENV var before proceeding +check_valid_env + +###################################################################### +# source the baseline environment variables +# these set the stage for all further environment manipulation for +# launching the app. +source ./.env.local + +###################################################################### +# let's make sure the .env.local sourced in correctly. +check_env_preconditions + +# set the FHIR_URL and FHIR_URL_V3 +set_fhir_urls + +# set the profile for docker compose +set_profile + +# retrieve the certs and store them in $HOME/.bb2/certstore +retrieve_certs + +set_salt + +echo "🚀 Launching the stack for '${ENV}'." + +echo "FHIR_URLs are:" +echo " * ${FHIR_URL}" +echo " * ${FHIR_URL_V3}" DOCKER_PS=$(docker ps -q) echo $DOCKER_PS @@ -44,7 +60,8 @@ fi echo "📊 Vernier start." 
echo +echo "PROFILE: ${PROFILE}" docker compose \ - --profile slsx \ + --profile "${PROFILE}" \ -f docker-compose-local.yaml \ up \ No newline at end of file diff --git a/dev-local/start-local.sh b/dev-local/start-local.sh index 4bb92c774..4a40d102a 100755 --- a/dev-local/start-local.sh +++ b/dev-local/start-local.sh @@ -1,32 +1,10 @@ #!/usr/bin/env bash -DB_MIGRATIONS=${DB_MIGRATIONS:-true} -SUPER_USER_NAME=${SUPER_USER_NAME:-'root'} -SUPER_USER_EMAIL=${SUPER_USER_EMAIL:-'bluebutton@example.com'} -SUPER_USER_PASSWORD=${SUPER_USER_PASSWORD:-'bluebutton123'} -BB20_ENABLE_REMOTE_DEBUG=${BB20_ENABLE_REMOTE_DEBUG:-false} -BB20_REMOTE_DEBUG_WAIT_ATTACH=${BB20_REMOTE_DEBUG_WAIT_ATTACH:-false} -BB2_SERVER_STD2FILE=${BB2_SERVER_STD2FILE:-''} +set -a -DJANGO_LOG_JSON_FORMAT_PRETTY=${DJANGO_LOG_JSON_FORMAT_PRETTY:-True} -DJANGO_USER_ID_SALT=${DJANGO_USER_ID_SALT:-"6E6F747468657265616C706570706572"} -DJANGO_USER_ID_ITERATIONS=${DJANGO_USER_ID_ITERATIONS:-"2"} +echo "DB_MIGRATIONS: ${DB_MIGRATIONS}" -if [ "${DJANGO_SLSX_CLIENT_SECRET}" = "xxxxx" ] -then - # for msls - echo "MSLS used for identity service..." -else - echo "SLSX used for identity service..." - DJANGO_MEDICARE_SLSX_REDIRECT_URI=${DJANGO_MEDICARE_SLSX_REDIRECT_URI:-"http://localhost:8000/mymedicare/sls-callback"} - DJANGO_MEDICARE_SLSX_LOGIN_URI=${DJANGO_MEDICARE_SLSX_LOGIN_URI:-"https://test.medicare.gov/sso/authorize?client_id=bb2api"} - DJANGO_SLSX_HEALTH_CHECK_ENDPOINT=${DJANGO_SLSX_HEALTH_CHECK_ENDPOINT:-"https://test.accounts.cms.gov/health"} - DJANGO_SLSX_TOKEN_ENDPOINT=${DJANGO_SLSX_TOKEN_ENDPOINT:-"https://test.medicare.gov/sso/session"} - DJANGO_SLSX_SIGNOUT_ENDPOINT=${DJANGO_SLSX_SIGNOUT_ENDPOINT:-"https://test.medicare.gov/sso/signout"} - DJANGO_SLSX_USERINFO_ENDPOINT=${DJANGO_SLSX_USERINFO_ENDPOINT:-"https://test.accounts.cms.gov/v1/users"} -fi - -if [ "${DB_MIGRATIONS}" = true ] +if [ "${DB_MIGRATIONS}" = "true" ] then echo "run db image migration and models initialization." 
python manage.py migrate From e02ec6fb232b8695bc8484ef329e72dc95556fbe Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Mon, 17 Nov 2025 16:58:42 -0500 Subject: [PATCH 04/14] A cleaner/quieter boot sequence. --- dev-local/Dockerfile.local | 12 ++++++++++-- dev-local/retrieve-certs-and-salt.bash | 2 +- dev-local/run-appropriate-stack.bash | 1 - dev-local/start-local.sh | 19 +++++++++++++++---- 4 files changed, 26 insertions(+), 8 deletions(-) diff --git a/dev-local/Dockerfile.local b/dev-local/Dockerfile.local index c70c7fe63..cbc282660 100644 --- a/dev-local/Dockerfile.local +++ b/dev-local/Dockerfile.local @@ -1,12 +1,20 @@ # FIXME: Update to most recent python. -FROM python:3.11 +FROM python:3.11-trixie # FIXME: Uncertain implications ENV PYTHONUNBUFFERED=1 ENV PYDEVD_DISABLE_FILE_VALIDATION=1 + +# WARNING +# This is installing the most recent version of Postgres tools. +# We would rather install v16, to match the database, which matches Amazon. RUN apt-get update \ && apt-get install -y \ - gettext + curl \ + gettext \ + gnupg \ + ca-certificates \ + postgresql-client # Set the local user for development # and mount the codebase at /code diff --git a/dev-local/retrieve-certs-and-salt.bash b/dev-local/retrieve-certs-and-salt.bash index e9307fead..862cda041 100755 --- a/dev-local/retrieve-certs-and-salt.bash +++ b/dev-local/retrieve-certs-and-salt.bash @@ -170,7 +170,7 @@ set_salt () { # SSL verify for internal endpoints can't currently use SSL verification (this may change in the future) export DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" - # export DJANGO_SLSX_VERIFY_SSL_EXTERNAL="True" + export DJANGO_SLSX_VERIFY_SSL_EXTERNAL="True" echo "🆗 Retrieved salt values." } \ No newline at end of file diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index d2348a227..84bb7d16b 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -60,7 +60,6 @@ fi echo "📊 Vernier start." 
echo -echo "PROFILE: ${PROFILE}" docker compose \ --profile "${PROFILE}" \ -f docker-compose-local.yaml \ up \ No newline at end of file diff --git a/dev-local/start-local.sh b/dev-local/start-local.sh index 4a40d102a..e45fed5f5 100755 --- a/dev-local/start-local.sh +++ b/dev-local/start-local.sh @@ -2,16 +2,27 @@ set -a -echo "DB_MIGRATIONS: ${DB_MIGRATIONS}" - if [ "${DB_MIGRATIONS}" = "true" ] then - echo "run db image migration and models initialization." + echo "🆗 running migrations" python manage.py migrate - echo "from django.contrib.auth.models import User; User.objects.create_superuser('${SUPER_USER_NAME}', '${SUPER_USER_EMAIL}', '${SUPER_USER_PASSWORD}')" | python manage.py shell + # We will recreate this with every launch. + # echo "TRUNCATE authorization_archiveddataaccessgrant;" | psql "${DATABASES_CUSTOM}" + + # Only create the root user if it doesn't exist. + result=$(echo "from django.contrib.auth.models import User; print(1) if User.objects.filter(username='${SUPER_USER_NAME}').exists() else print(0)" | python manage.py shell) + if [[ "$result" == "0" ]]; then + echo "🆗 creating ${SUPER_USER_NAME} user." + echo "from django.contrib.auth.models import User; User.objects.create_superuser('${SUPER_USER_NAME}', '${SUPER_USER_EMAIL}', '${SUPER_USER_PASSWORD}')" | python manage.py shell + else + echo "🆗 ${SUPER_USER_NAME} already exists." + fi + python manage.py create_admin_groups + echo "🆗 loading scopes.json" python manage.py loaddata scopes.json + python manage.py create_blue_button_scopes python manage.py create_test_user_and_application python manage.py create_user_identification_label_selection From 9b1a03d34ee55389c7b3baa956df4244e842351d Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Tue, 18 Nov 2025 11:38:43 -0500 Subject: [PATCH 05/14] Improving local build.
--- dev-local/.env.container | 101 +++++++++------------------- dev-local/Dockerfile.selenium-ecr | 22 ++++++ dev-local/Dockerfile.selenium-local | 18 +++++ dev-local/Makefile | 32 +++++---- dev-local/build-local.sh | 10 --- 5 files changed, 88 insertions(+), 95 deletions(-) create mode 100644 dev-local/Dockerfile.selenium-ecr create mode 100644 dev-local/Dockerfile.selenium-local delete mode 100755 dev-local/build-local.sh diff --git a/dev-local/.env.container b/dev-local/.env.container index eab10c3de..90bc776a4 100644 --- a/dev-local/.env.container +++ b/dev-local/.env.container @@ -1,81 +1,36 @@ ################################################################################ -# ABOUT THESE VALUES -# These values get sourced into `run-appropriate-stack` to do some work -# pre-launch, and then they are sourced into `start-local` inside the container -# to do *more* work +# CONTAINER ENV FILE +# +# 1. It is unclear if we need *all* of these variables in the container. Some, like the +# AWS values, should be double-checked. +# 2. We ALWAYS pass ALL variables through. At no point should defaults be set here. +# The idea is that we do any environment configuration in our initialization scripts, +# where we can have conditionals, etc. By the time we get to the container, we should +# be pulling the values directly from the env. This also eliminates a "moving part." +# If the variable is wrong, it MUST be wrong in the setup process, not here. +# 3. We should eliminate any further variable specialization in the app. That is, the app +# should not do any further `if/else` on these variables. It should pull them in without defaults. +# Or, any defaults should be of the form "BAD_VARIABLE_VALUE," or perhaps we should just exit. +# We want our application to fail at launch if we are missing critical variables. We do NOT want +# a production app to try and "figure things out" when its environment is not configured correctly. 
-################################################################################ -# DEVELOPER VARIABLES -# You may want to tweak these any given day of the week. - -## enable debugpy remote debugging (on port 5678) -# 20251113 MCJ This clearly works, but it does not seem to be used anywhere -# that would actually affect application behavior. :confused: -BB20_ENABLE_REMOTE_DEBUG="${BB20_ENABLE_REMOTE_DEBUG}" -BB20_REMOTE_DEBUG_WAIT_ATTACH="${BB20_REMOTE_DEBUG_WAIT_ATTACH}" -# Setting this to `false` will disable tests that run against the live BFD server. -# To run them locally, you need to be authenticated and on the VPN. -RUN_ONLINE_TESTS="${RUN_ONLINE_TESTS}" -# You probably shouldn't touch these. -DJANGO_FHIR_CERTSTORE="${DJANGO_FHIR_CERTSTORE}" - -################################################################################ -# AWS CREDENTIALS -# These values must be loaded into the local environment at the time the -# stack is started. That implies (perhaps) a `kion s` or similar has -# been executed before the `docker compose up`. +AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}" -AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" AWS_SESSION_TOKEN="${AWS_SESSION_TOKEN}" - -# Local superuser account -SUPER_USER_NAME="${SUPER_USER_NAME}" -SUPER_USER_PASSWORD="${SUPER_USER_PASSWORD}" -SUPER_USER_EMAIL="${SUPER_USER_EMAIL}" -# We run migrations *always* when running locally -DB_MIGRATIONS="${DB_MIGRATIONS}" -# This would be cryptographically secure in production. -DJANGO_SECRET_KEY="${DJANGO_SECRET_KEY}" - -# These need to be conditionally selected from -# by a launcher script. Define all of them here. -FHIR_URL="${FHIR_URL}" -FHIR_URL_V3="${FHIR_URL_V3}" - -################################################################################ -# DATABASE -# Shared DB ENV vars file for the "db" service containter. 
-################################################################################ -POSTGRES_DB="${POSTGRES_DB}" -POSTGRES_PASSWORD="${POSTGRES_PASSWORD}" -POSTGRES_PORT="${POSTGRES_PORT}" - -################################################################################ -# BLUE BUTTON ("THE APP") -# Configure the local containerized app for local execution. -################################################################################ -# Top level Djano settings -DJANGO_SETTINGS_MODULE="${DJANGO_SETTINGS_MODULE}" +BB2_SERVER_STD2FILE="${BB2_SERVER_STD2FILE}" +BB20_ENABLE_REMOTE_DEBUG="${BB20_ENABLE_REMOTE_DEBUG}" +BB20_REMOTE_DEBUG_WAIT_ATTACH="${BB20_REMOTE_DEBUG_WAIT_ATTACH}" DATABASES_CUSTOM="${DATABASES_CUSTOM}" -# We need this so that our local `http://localhost:8000/` URLs work in the test client/local stack. -OAUTHLIB_INSECURE_TRANSPORT="${OAUTHLIB_INSECURE_TRANSPORT}" -# This is read via `django-getenv` in base.py -DJANGO_SECURE_SESSION="${DJANGO_SECURE_SESSION}" -# This gets defaulted to true later. Go ahead and set the value. +DB_MIGRATIONS="${DB_MIGRATIONS}" +DJANGO_FHIR_CERTSTORE="${DJANGO_FHIR_CERTSTORE}" DJANGO_LOG_JSON_FORMAT_PRETTY="${DJANGO_LOG_JSON_FORMAT_PRETTY}" -# 20251113 MCJ This defaults to '' later, but could also be 'YES'. -# This outputs logs to a file locally. -BB2_SERVER_STD2FILE="${BB2_SERVER_STD2FILE}" - -# BFD credentials/settings -# 20251113 MCJ Using values that were defaulted in launch scripts. 
-# DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" -# DJANGO_USER_ID_ITERATIONS="2" - DJANGO_MEDICARE_SLSX_LOGIN_URI="${DJANGO_MEDICARE_SLSX_LOGIN_URI}" DJANGO_MEDICARE_SLSX_REDIRECT_URI="${DJANGO_MEDICARE_SLSX_REDIRECT_URI}" DJANGO_PASSWORD_HASH_ITERATIONS="${DJANGO_PASSWORD_HASH_ITERATIONS}" +DJANGO_SECRET_KEY="${DJANGO_SECRET_KEY}" +DJANGO_SECURE_SESSION="${DJANGO_SECURE_SESSION}" +DJANGO_SETTINGS_MODULE="${DJANGO_SETTINGS_MODULE}" DJANGO_SLSX_CLIENT_ID="${DJANGO_SLSX_CLIENT_ID}" DJANGO_SLSX_CLIENT_SECRET="${DJANGO_SLSX_CLIENT_SECRET}" DJANGO_SLSX_HEALTH_CHECK_ENDPOINT="${DJANGO_SLSX_HEALTH_CHECK_ENDPOINT}" @@ -85,3 +40,13 @@ DJANGO_SLSX_USERINFO_ENDPOINT="${DJANGO_SLSX_USERINFO_ENDPOINT}" DJANGO_SLSX_VERIFY_SSL_EXTERNAL="${DJANGO_SLSX_VERIFY_SSL_EXTERNAL}" DJANGO_USER_ID_ITERATIONS="${DJANGO_USER_ID_ITERATIONS}" DJANGO_USER_ID_SALT="${DJANGO_USER_ID_SALT}" +FHIR_URL_V3="${FHIR_URL_V3}" +FHIR_URL="${FHIR_URL}" +OAUTHLIB_INSECURE_TRANSPORT="${OAUTHLIB_INSECURE_TRANSPORT}" +POSTGRES_DB="${POSTGRES_DB}" +POSTGRES_PASSWORD="${POSTGRES_PASSWORD}" +POSTGRES_PORT="${POSTGRES_PORT}" +RUN_ONLINE_TESTS="${RUN_ONLINE_TESTS}" +SUPER_USER_EMAIL="${SUPER_USER_EMAIL}" +SUPER_USER_NAME="${SUPER_USER_NAME}" +SUPER_USER_PASSWORD="${SUPER_USER_PASSWORD}" diff --git a/dev-local/Dockerfile.selenium-ecr b/dev-local/Dockerfile.selenium-ecr new file mode 100644 index 000000000..5605fd29a --- /dev/null +++ b/dev-local/Dockerfile.selenium-ecr @@ -0,0 +1,22 @@ +FROM python:3.11 +# For build CBC Jenkins job ECR image +ENV PYTHONUNBUFFERED=1 + +RUN mkdir /code +ADD . 
/code/ +WORKDIR /code + +RUN pip install --upgrade pip +RUN apt-get update && apt-get install -yq git unzip curl + +# Install Chrome for Selenium +RUN curl https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb -o /chrome.deb \ + && dpkg -i /chrome.deb || apt-get install -yf \ + && rm /chrome.deb + +# hard code the zip URL here since `curl -sS chromedriver.storage.googleapis.com/LATEST_RELEASE` still points to 114 which is out of date +# this is the current way google publish the chrome drivers, going forward, need to make changes to keep up with the way google publish the +# drivers. +RUN wget -O /tmp/chromedriver.zip https://storage.googleapis.com/chrome-for-testing-public/131.0.6778.108/linux64/chromedriver-linux64.zip \ + && unzip -p /tmp/chromedriver.zip chromedriver-linux64/chromedriver > /usr/local/bin/chromedriver \ + && chmod +x /usr/local/bin/chromedriver diff --git a/dev-local/Dockerfile.selenium-local b/dev-local/Dockerfile.selenium-local new file mode 100644 index 000000000..c2757e9de --- /dev/null +++ b/dev-local/Dockerfile.selenium-local @@ -0,0 +1,18 @@ +FROM selenium/standalone-chromium + +USER root + +RUN apt-get update ; apt-get install -yq python3 python3-venv +RUN ln -s /usr/bin/python3 /usr/local/bin/python + +# switch to existing seluser from selenium docker +USER seluser + +ADD . /code +WORKDIR /code +RUN python -m venv /tmp/venv +RUN . 
/tmp/venv/bin/activate +ENV PATH="/tmp/venv/bin:${PATH}" + +RUN pip3 install --upgrade pip +RUN pip3 install selenium pytest debugpy jsonschema python-dateutil diff --git a/dev-local/Makefile b/dev-local/Makefile index c03f2a8a0..701bea7ca 100644 --- a/dev-local/Makefile +++ b/dev-local/Makefile @@ -1,26 +1,24 @@ all: build-local run-local build-local: - cd ../msls-local ; make all ; \ - cd ../dev-local ; \ - docker build \ + @echo "building mock sls image" + cd ../msls-local ; make all + @echo "building local blue button image" + cd ../dev-local ; docker build \ --platform "linux/amd64" \ -t bb-local:latest \ - -f Dockerfile.local .. ; \ - cd .. - -# https://stackoverflow.com/questions/2826029/passing-additional-variables-from-command-line-to-make - -# ./check-env-preconditions.bash ; \ -# source ./.env.local ; \ -# ./retrieve-certs.bash ; \ -# source retrieve-salt.bash ; \ -# ./check-env-postconditions.bash ; \ - - + -f Dockerfile.local .. +# TODO: Is this necessary in a local build? Probably not. +# @echo "building selenium ecr image" +# cd ../dev-local ; docker build \ +# --platform "linux/amd64" \ +# -t selenium-ecr:latest \ +# -f Dockerfile.selenium-ecr .. + cd ../dev-local ; docker build \ + --platform "linux/amd64" \ + -t selenium-local:latest \ + -f Dockerfile.selenium-local .. run-local: -# Environments in Makefiles are strange. Once we source, we have to run from the same line, basically. -# Also, you have to outdent comments to make sure they aren't passed to the shell. :sigh: @echo "Configuring for ${ENV}" ; \ ./run-appropriate-stack.bash \ No newline at end of file diff --git a/dev-local/build-local.sh b/dev-local/build-local.sh deleted file mode 100755 index b65b4ab0a..000000000 --- a/dev-local/build-local.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -# Exit if any command results in a non-zero exit status. -set -e - - -docker build \ - --platform "linux/amd64" \ - -t bb-local:latest \ - -f Dockerfile.local .. 
\ No newline at end of file From 7c18c9e732e5e27988cc5bc4ac598c2f28e291cc Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Tue, 18 Nov 2025 13:53:39 -0500 Subject: [PATCH 06/14] Fix where check happens --- dev-local/Makefile | 2 +- dev-local/check-pre-post.bash | 12 ++++++++++-- dev-local/retrieve-certs-and-salt.bash | 5 +++-- dev-local/run-appropriate-stack.bash | 4 ++++ dev-local/run-tests-in-both-environments.bash | 4 ++++ msls-local/Makefile | 2 +- 6 files changed, 23 insertions(+), 6 deletions(-) create mode 100644 dev-local/run-tests-in-both-environments.bash diff --git a/dev-local/Makefile b/dev-local/Makefile index 701bea7ca..d3984e6cd 100644 --- a/dev-local/Makefile +++ b/dev-local/Makefile @@ -2,7 +2,7 @@ all: build-local run-local build-local: @echo "building mock sls image" - cd ../msls-local ; make all + cd ../msls-local ; make all ; cd ../dev-local @echo "building local blue button image" cd ../dev-local ; docker build \ --platform "linux/amd64" \ diff --git a/dev-local/check-pre-post.bash b/dev-local/check-pre-post.bash index 16c603d06..f1460057f 100644 --- a/dev-local/check-pre-post.bash +++ b/dev-local/check-pre-post.bash @@ -18,9 +18,10 @@ check_valid_env () { echo "ENV must be set to 'local', 'test', or 'sbx'." echo "ENV is currently set to '${ENV}'." echo "Exiting." - exit -2 + return -2 fi + echo "✅ check_valid_env" } check_env_preconditions () { @@ -42,6 +43,12 @@ check_env_preconditions () { return -1 fi + echo "✅ check_env_preconditions" + +} + +check_env_after_source () { + if [ -z ${OAUTHLIB_INSECURE_TRANSPORT} ]; then echo "We need insecure transport when running locally." echo "OAUTHLIB_INSECURE_TRANSPORT was not set to true." @@ -54,5 +61,6 @@ check_env_preconditions () { echo "Exiting." 
return -1 fi -} + echo "✅ check_env_after_source" +} \ No newline at end of file diff --git a/dev-local/retrieve-certs-and-salt.bash b/dev-local/retrieve-certs-and-salt.bash index 862cda041..e5fbbd5c7 100755 --- a/dev-local/retrieve-certs-and-salt.bash +++ b/dev-local/retrieve-certs-and-salt.bash @@ -83,7 +83,7 @@ retrieve_certs () { --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${CERT_SUFFIX} \ --query 'SecretString' \ --output text | base64 -d > "${BB2_CERTSTORE}/ca.cert.pem" - + if [ $? -ne 0 ]; then echo "⛔ Failed to retrieve cert. Exiting." return -3 @@ -103,10 +103,11 @@ retrieve_certs () { declare -a cert_files=($CERT $KEY) for FILE in "${cert_files[@]}"; do - if [ -e "${BB2_CERTSTORE}/${FILE}" ]; then + if [ -s "${BB2_CERTSTORE}/${FILE}" ]; then echo " 🆗 '$FILE' exists." else echo " ⛔ '$FILE' does not exist." + echo " ⛔ Try exiting your 'kion' shell and re-authenticating." return -5 fi done diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index 84bb7d16b..e03b7a084 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -7,6 +7,7 @@ set -a # exit on error. set -e + ###################################################################### # let's make sure we have a valid ENV var before proceeding check_valid_env @@ -17,6 +18,9 @@ check_valid_env # launching the app. source ./.env.local +# add another check or two after we source the env file. +check_env_after_source + ###################################################################### # let's make sure the .env.local sourced in correctly. 
check_env_preconditions diff --git a/dev-local/run-tests-in-both-environments.bash b/dev-local/run-tests-in-both-environments.bash new file mode 100644 index 000000000..b13bf069a --- /dev/null +++ b/dev-local/run-tests-in-both-environments.bash @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +export ENV=test +docker compose up --profile slsx \ No newline at end of file diff --git a/msls-local/Makefile b/msls-local/Makefile index 95ca1ba49..a1318dea6 100644 --- a/msls-local/Makefile +++ b/msls-local/Makefile @@ -1,7 +1,7 @@ all: build-local build-local: - cd msls-local ; docker build \ + docker build \ --platform "linux/amd64" \ -t msls-local:latest \ -f Dockerfile.msls . \ No newline at end of file From 80d336177cff4dcfa3265336274c14cdf4b30c1e Mon Sep 17 00:00:00 2001 From: Brandon Wang Date: Tue, 18 Nov 2025 12:54:09 -0600 Subject: [PATCH 07/14] adding extra around kion setup --- dev-local/README.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/dev-local/README.md b/dev-local/README.md index 176fa29e0..f2dce57aa 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -18,7 +18,14 @@ A Mac, out-of-the-box, should "just work," as well as an Intel-based Linux host. You should already have a `.kion.yaml` in your home directory. If not, follow the [local desktop development](https://confluence.cms.gov/spaces/BB2/pages/484224999/Local+Desktop+Development) onboarding docs to set up Cloudtamer/`kion`. -You need to add an alias for this tooling to work. +You need to add an alias in the previously mentioned .kion.yaml for this tooling to work. +Open .kion.yaml with something like: + +``` +code ~/.kion.yml +``` + +Then add the alias as: ``` favorites: From d86ebf5df12d081800e6802220285c3284fd9422 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Wed, 19 Nov 2025 07:04:33 -0500 Subject: [PATCH 08/14] Add more checking, change params. 
--- dev-local/README.md | 40 +++- dev-local/check-pre-post.bash | 66 ------ dev-local/docker-compose-local.yaml | 13 +- dev-local/run-appropriate-stack.bash | 44 ++-- dev-local/start-local.sh | 16 +- ...s-and-salt.bash => utility-functions.bash} | 206 ++++++++++++++---- msls-local/start-local.sh | 3 + 7 files changed, 235 insertions(+), 153 deletions(-) delete mode 100644 dev-local/check-pre-post.bash rename dev-local/{retrieve-certs-and-salt.bash => utility-functions.bash} (52%) create mode 100755 msls-local/start-local.sh diff --git a/dev-local/README.md b/dev-local/README.md index f2dce57aa..33f536508 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -35,7 +35,13 @@ favorites: access_type: cli ``` -If you already have some aliases, you can just add this one to the list. The account number and cloud access role can be obtained from the Cloudtamer dashboard. The alias **must** be named `BB2-NON-PROD` for these tools to work. +If you already have some aliases, you can just add this one to the list. The account number and cloud access role can be obtained from the Cloudtamer dashboard. This is not strictly necessary, as the tooling cannot automate a `kion` call *and* then continue, as `kion` opens a new shell. However, it is much easier (and, for this documentation, the preferred method) to invoke + +``` +kion f BB2-NON-PROD +``` + +than to navigate a menu structure. You may ultimately choose a shorter alias, e.g. `kion f bnp`. ## to start @@ -61,23 +67,43 @@ The first step is to build the local containers. From the root of the tree, or f make build-local ``` -This should build the image `bb-local:latest`. +This should build the container image for `bb-local:latest`, the Selenium image, and the MSLS image. -Building the image is only necessary when the `requirements/requirements.txt` or `requirements/requirements.dev.txt` files change. 
Those requirements get baked into the image; changes to application code should be picked up via dynamic reload during normal development. +Building the images is only necessary when the `requirements/requirements.txt` or `requirements/requirements.dev.txt` files change. Those requirements get baked into the image; changes to application code should be picked up via dynamic reload during normal development. ## running the local image Next, run the stack. -### running locally / mocking MSLS +### running the stack -To run the stack locally, +There are four possible ways to run the stack. ``` -make run-local ENV=local +make run-local bfd= auth= ``` -This will launch the stack with no connection to live environments, and it will use the mocked MSLS tooling. +For example, to run against `test` with a live SLSX exchange: + +``` +make run-local bfd=test auth=live +``` + +Each combination has different implications. Only some make sense at this time (Nov '25): + +| bfd / auth | mock | live | +| --- | --- | --- | +| local | local unit tests | testing SLSX sequences | +| test | ⛔ | Full-up against `test` | +| sbx | ⛔ | Full-up against `sbx` | + +* `local/mock`: This makes sense for running unit tests; only local tests will run in this configuration. +* `local/live`: Manual testing of SLSX sequences should be able to be performed with this combination (TBD) +* `test/mock`: Not a valid condition; a mock authentication will not work with a live server. +* `test/live`: Live SLSX exchanges with medicare.gov and calls against the `test` BFD environment. +* `sbx/mock`: Not a valid condition. +* `sbx/live`: Live SLSX exchanges and calls against the `sbx` BFD environment. 
+ ### running against `test` diff --git a/dev-local/check-pre-post.bash b/dev-local/check-pre-post.bash deleted file mode 100644 index f1460057f..000000000 --- a/dev-local/check-pre-post.bash +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env bash - -check_valid_env () { - if [[ "${ENV}" == "local" ]]; then - # This is a no-op. - : - ##### - # TEST - elif [[ "${ENV}" == "test" ]]; then - : - ##### - # SBX - elif [[ "${ENV}" == "sbx" ]]; then - : - ##### - # ERR - else - echo "ENV must be set to 'local', 'test', or 'sbx'." - echo "ENV is currently set to '${ENV}'." - echo "Exiting." - return -2 - fi - - echo "✅ check_valid_env" -} - -check_env_preconditions () { - if [ "${ENV}" != "local" ]; then - if [ -z ${KION_ACCOUNT_ALIAS} ]; then - echo "You must run 'kion f BB2_NON_PROD' before 'make run ENV=test'." - echo "Exiting." - return -1 - fi - fi - - # https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash - if [ -z ${ENV} ]; then - echo "ENV not set. Cannot retrieve certs." - echo "ENV must be one of 'local', 'test', or 'sbx'." - echo "For example:" - echo " make run-local ENV=test" - echo "Exiting." - return -1 - fi - - echo "✅ check_env_preconditions" - -} - -check_env_after_source () { - - if [ -z ${OAUTHLIB_INSECURE_TRANSPORT} ]; then - echo "We need insecure transport when running locally." - echo "OAUTHLIB_INSECURE_TRANSPORT was not set to true." - echo "Exiting." - return -1 - fi - - if [ -z ${DB_MIGRATIONS} ]; then - echo "There should be a DB_MIGRATIONS flag." - echo "Exiting." - return -1 - fi - - echo "✅ check_env_after_source" -} \ No newline at end of file diff --git a/dev-local/docker-compose-local.yaml b/dev-local/docker-compose-local.yaml index d8c3aad9b..ef1fe1a33 100644 --- a/dev-local/docker-compose-local.yaml +++ b/dev-local/docker-compose-local.yaml @@ -14,34 +14,29 @@ services: # A small Flask app for testing. 
msls: image: msls-local:latest - command: python app.py + platform: linux/amd64 + command: /code/start-local.sh ports: - "8080:8080" volumes: - ../msls-local:/code - profiles: - - mocksls - - slsx #################### # Blue Button web: image: bb-local:latest - command: /start-local.sh + platform: linux/amd64 + command: /code/dev-local/start-local.sh env_file: - .env.container volumes: - ..:/code - ~/.bb2/certstore:/certstore - - ./start-local.sh:/start-local.sh ports: - "8000:8000" - "5678:5678" depends_on: - db - msls - profiles: - - slsx - platform: linux/amd64 # web_msls: # build: . diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index e03b7a084..aaef2c374 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -1,35 +1,34 @@ #!/usr/bin/env bash -source ./check-pre-post.bash -source ./retrieve-certs-and-salt.bash +source ./utility-functions.bash # this says to "export all variables." set -a # exit on error. set -e +# bfd = local | test | sbx +# auth = mock | live -###################################################################### # let's make sure we have a valid ENV var before proceeding check_valid_env -###################################################################### # source the baseline environment variables # these set the stage for all further environment manipulation for # launching the app. +clear_canary_variables source ./.env.local # add another check or two after we source the env file. check_env_after_source -###################################################################### # let's make sure the .env.local sourced in correctly. check_env_preconditions # set the FHIR_URL and FHIR_URL_V3 -set_fhir_urls +set_bfd_urls # set the profile for docker compose -set_profile +set_auth_profile # retrieve the certs and store them in $HOME/.bb2/certstore retrieve_certs @@ -38,33 +37,20 @@ set_salt echo "🚀 Launching the stack for '${ENV}'." 
-echo "FHIR_URLs are:" -echo " * ${FHIR_URL}" -echo " * ${FHIR_URL_V3}" - -DOCKER_PS=$(docker ps -q) -echo $DOCKER_PS - -TAKE_IT_DOWN="NO" -for id in $DOCKER_PS; do - NAME=$(docker inspect --format '{{.Config.Image}}' $id) - if [[ "${NAME}" =~ "postgres" ]]; then - echo "🤔 I think things are still running. Bringing the stack down." - TAKE_IT_DOWN="YES" - fi -done - -if [ "${TAKE_IT_DOWN}" = "YES" ]; then - for id in $DOCKER_PS; do - echo "🛑 Stopping container $id" - docker stop $id - done +if [[ "${bfd}" == "local" ]]; then + echo "🥶 FHIR_URLs are not set when running locally." + echo " BFD calls will fail." +else + echo "FHIR_URLs are:" + echo " * ${FHIR_URL}" + echo " * ${FHIR_URL_V3}" fi +cleanup_docker_stack + echo "📊 Vernier start." echo docker compose \ - --profile "${PROFILE}" \ -f docker-compose-local.yaml \ up \ No newline at end of file diff --git a/dev-local/start-local.sh b/dev-local/start-local.sh index e45fed5f5..a60cf4cee 100755 --- a/dev-local/start-local.sh +++ b/dev-local/start-local.sh @@ -4,7 +4,7 @@ set -a if [ "${DB_MIGRATIONS}" = "true" ] then - echo "🆗 running migrations" + echo "🔵 running migrations" python manage.py migrate # We will recrate this with every launch. @@ -13,21 +13,29 @@ then # Only create the root user if it doesn't exist. result=$(echo "from django.contrib.auth.models import User; print(1) if User.objects.filter(username='${SUPER_USER_NAME}').exists() else print(0)" | python manage.py shell) if [[ "$result" == "0" ]]; then - echo "🆗 creating ${} user." echo "from django.contrib.auth.models import User; User.objects.create_superuser('${SUPER_USER_NAME}', '${SUPER_USER_EMAIL}', '${SUPER_USER_PASSWORD}')" | python manage.py shell + echo "🆗 created ${SUPER_USER_NAME} user." else echo "🆗 ${SUPER_USER_NAME} already exists." 
fi python manage.py create_admin_groups - echo "🆗 loading scopes.json" + echo "🆗 create_admin_groups" + python manage.py loaddata scopes.json + echo "🆗 loaddata scopes.json" python manage.py create_blue_button_scopes + echo "🆗 create_blue_button_scopes" + python manage.py create_test_user_and_application + echo "🆗 create_test_user_and_application" + python manage.py create_user_identification_label_selection - echo "creating feature switches......" + echo "🆗 create_user_identification_label_selection" + python manage.py create_test_feature_switches + echo "🆗 create_test_feature_switches" else echo "restarting blue button server, no db image migration and models initialization will run here, you might need to manually run DB image migrations." fi diff --git a/dev-local/retrieve-certs-and-salt.bash b/dev-local/utility-functions.bash similarity index 52% rename from dev-local/retrieve-certs-and-salt.bash rename to dev-local/utility-functions.bash index e5fbbd5c7..201443620 100755 --- a/dev-local/retrieve-certs-and-salt.bash +++ b/dev-local/utility-functions.bash @@ -1,63 +1,161 @@ #!/usr/bin/env bash -# This variable determines if we're going to fetch -# cert/salt values from the secret manager. -# We assume yes, but set it to `no` when running fully locally. -export CERT_AND_SALT="YES" +######################################## +# check_valid_env +# Makes sure we have one of the three valid +# execution environments. +check_valid_env () { + if [[ "${bfd}" == "local" ]]; then + # This is a no-op. + : + ##### + # TEST + elif [[ "${bfd}" == "test" ]]; then + : + ##### + # SBX + elif [[ "${bfd}" == "sbx" ]]; then + : + ##### + # ERR + else + echo "'bfd' must be set to 'local', 'test', or 'sbx'." + echo "'bfd' is currently set to '${bfd}'." + echo "Exiting." + return -2 + fi + + echo "✅ check_valid_env" +} + +######################################## +# clear_canary_variables +# We want one or two variables that we know will be obtained +# via sourcing the .env. 
Unset them first. +clear_canary_variables () { + unset OATHLIB_INSECURE_TRANSPORT + unset DB_MIGRATIONS +} -set_fhir_urls () { - echo "✅ set_fhir_urls" +######################################## +# check_env_preconditions +# Certain minimal things must be true in order to proceed. +check_env_preconditions () { + if [ "${bfd}" != "local" ]; then + if [ -z ${KION_ACCOUNT_ALIAS} ]; then + echo "You must run 'kion f ' before 'make run bfd=${bfd}'." + echo "Exiting." + return -1 + fi + fi + + # https://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash + if [ -z ${bfd} ]; then + echo "'bfd' not set. Cannot retrieve certs." + echo "'bfd' must be one of 'local', 'test', or 'sbx'." + echo "For example:" + echo " make run-local bfd=test" + echo "Exiting." + return -1 + fi + + echo "✅ check_env_preconditions" + +} + +######################################## +# check_env_after_source +# After sourcing in the .env, we need to make sure that one or two +# variables are now present that would not have been otherwise. +check_env_after_source () { + + if [ -z ${OAUTHLIB_INSECURE_TRANSPORT} ]; then + echo "We need insecure transport when running locally." + echo "OAUTHLIB_INSECURE_TRANSPORT was not set to true." + echo "Something went badly wrong." + echo "Exiting." + return -1 + fi + + if [ -z ${DB_MIGRATIONS} ]; then + echo "There should be a DB_MIGRATIONS flag." + echo "Something went badly wrong." + echo "Exiting." + return -1 + fi + + echo "✅ check_env_after_source" +} - if [[ "${ENV}" == "local" ]]; then - echo "🆗 No FHIR URLs set for local testing." +######################################## +# set_bfd_urls +# Make sure we have the right BFD URLs for testing against. +set_bfd_urls () { + ##### + # LOCAL + if [[ "${bfd}" == "local" ]]; then + echo "⚠️ No FHIR URLs set for local testing." + echo " There are no mock BFD endpoints for local testing at this time." 
##### # TEST - elif [[ "${ENV}" == "test" ]]; then + elif [[ "${bfd}" == "test" ]]; then export FHIR_URL="${FHIR_URL_TEST}" export FHIR_URL_V3="${FHIR_URL_V3_TEST}" - ##### # SBX - elif [[ "${ENV}" == "sbx" ]]; then + elif [[ "${bfd}" == "sbx" ]]; then export FHIR_URL="${FHIR_URL_SBX}" export FHIR_URL_V3="${FHIR_URL_V3_SBX}" fi -} -set_profile () { - echo "✅ set_profile" + echo "✅ set_bfd_urls" +} - if [[ "${ENV}" == "local" ]]; then +######################################## +# set_auth_profile +# This sets the variables that determine if we will +# auth locally (mock) or against a live server. +set_auth_profile () { + if [[ "${bfd}" == "local" ]]; then export PROFILE="mock-sls" ##### # TEST - elif [[ "${ENV}" == "test" ]]; then + elif [[ "${bfd}" == "test" ]]; then export PROFILE="slsx" ##### # SBX - elif [[ "${ENV}" == "sbx" ]]; then + elif [[ "${bfd}" == "sbx" ]]; then export PROFILE="slsx" fi + + echo "✅ set_profile" } -retrieve_certs () { - echo "✅ retrieve_certs" +######################################## +# retrieve_certs +# Download the certs from the secrets store. +# Put them in a "BB2 config directory" in the developer's +# home directory. This keeps them out of the tree. - if [[ "${ENV}" == "local" ]]; then +# This variable determines if we're going to fetch +# cert/salt values from the secret manager. +# We assume yes, but set it to `no` when running fully locally. +export CERT_AND_SALT="YES" + +retrieve_certs () { + if [[ "${bfd}" == "local" ]]; then echo "🆗 Running locally. Not retrieving certs." echo "🆗 Running locally. Not retrieving salt." 
CERT_AND_SALT="NO" export CERT_SUFFIX="" - export DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" - export DJANGO_USER_ID_ITERATIONS="2" ##### # TEST - elif [[ "${ENV}" == "test" ]]; then + elif [[ "${bfd}" == "test" ]]; then export CERT_SUFFIX="_test" export PROFILE="slsx" ##### # SBX - elif [[ "${ENV}" == "sbx" ]]; then + elif [[ "${bfd}" == "sbx" ]]; then export CERT_SUFFIX="" export PROFILE="slsx" fi @@ -78,7 +176,7 @@ retrieve_certs () { rm -f "${BB2_CERTSTORE}/$CERT" rm -f "${BB2_CERTSTORE}/$KEY" - echo "🎁 Retrieving certs for the '${ENV}' environment with suffix '${CERT_SUFFIX}'." + echo "🎁 Retrieving certs for the '${bfd}' environment with suffix '${CERT_SUFFIX}'." aws secretsmanager get-secret-value \ --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${CERT_SUFFIX} \ --query 'SecretString' \ @@ -115,14 +213,18 @@ retrieve_certs () { chmod 600 "${BB2_CERTSTORE}/ca.cert.pem" chmod 600 "${BB2_CERTSTORE}/ca.key.nocrypt.pem" - echo "🆗 Retrieved cert and key for '${ENV}'." fi + + echo "✅ retrieve_certs" } +######################################## +# set_salt +# The other half of retrieve_certs. Sets up additional +# variables for secure communication with auth servers +# (or helps set up the mock). set_salt () { - echo "✅ set_salt" - - if [ "${ENV}" = "local" ]; then + if [ "${bfd}" = "local" ]; then echo "🆗 Running locally. Not retrieving salt." export DJANGO_USER_ID_SALT="6E6F747468657265616C706570706572" export DJANGO_USER_ID_ITERATIONS="2" @@ -139,13 +241,13 @@ set_salt () { DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" return 0 - elif [ "${ENV}" = "test" ]; then - echo "🆗 Retrieving salt/client values for '${ENV}'." - elif [ "${ENV}" = "sbx" ]; then - echo "🆗 Retrieving salt/client values for '${ENV}'." + elif [ "${bfd}" = "test" ]; then + echo "🆗 Retrieving salt/client values for '${bfd}'." + elif [ "${bfd}" = "sbx" ]; then + echo "🆗 Retrieving salt/client values for '${bfd}'." 
else - echo "⛔ ENV must be set to 'test' or 'sbx'." - echo " ENV is currently set to '${ENV}'." + echo "⛔ bfd must be set to 'test' or 'sbx'." + echo " bfd is currently set to '${bfd}'." echo " Exiting." return -2 fi @@ -166,12 +268,40 @@ set_salt () { export DJANGO_SLSX_USERINFO_ENDPOINT="https://test.accounts.cms.gov/v1/users" # SLSx credentials - export DJANGO_SLSX_CLIENT_ID=bb2api - export DJANGO_SLSX_CLIENT_SECRET=${DJANGO_SLSX_CLIENT_SECRET} + export DJANGO_SLSX_CLIENT_ID="bb2api" + export DJANGO_SLSX_CLIENT_SECRET="${DJANGO_SLSX_CLIENT_SECRET}" # SSL verify for internal endpoints can't currently use SSL verification (this may change in the future) export DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" export DJANGO_SLSX_VERIFY_SSL_EXTERNAL="True" - echo "🆗 Retrieved salt values." + echo "✅ set_salt" +} + +######################################## +# cleanup_docker_stack +# We can't run the stack twice. (Or, it isn't configured to run twice +# with this tooling *yet*.) This walks the open images and closes anything +# that looks like ours. It doesn't *really* know, so if you are doing other work, +# this will probably close things. In short: if you have a `postgres` container, this +# function will try and stop ALL docker containers. +cleanup_docker_stack () { + DOCKER_PS=$(docker ps -q) + echo $DOCKER_PS + + TAKE_IT_DOWN="NO" + for id in $DOCKER_PS; do + NAME=$(docker inspect --format '{{.Config.Image}}' $id) + if [[ "${NAME}" =~ "postgres" ]]; then + echo "🤔 I think things are still running. Bringing the stack down." 
+ TAKE_IT_DOWN="YES" + fi + done + + if [ "${TAKE_IT_DOWN}" = "YES" ]; then + for id in $DOCKER_PS; do + echo "🛑 Stopping container $id" + docker stop $id + done + fi } \ No newline at end of file diff --git a/msls-local/start-local.sh b/msls-local/start-local.sh new file mode 100755 index 000000000..2bab81c4a --- /dev/null +++ b/msls-local/start-local.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +python app.py \ No newline at end of file From 84be102e21f30132c0638700044d581b382d5cd2 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Thu, 20 Nov 2025 09:38:04 -0500 Subject: [PATCH 09/14] Updated/all tests passing --- apps/authorization/models.py | 11 +- .../commands/create_test_feature_switches.py | 4 +- apps/dot_ext/tests/test_views.py | 49 +++--- .../create_test_user_and_application.py | 162 ++++++++++++------ ...reate_test_users_and_applications_batch.py | 40 +++-- dev-local/.env.container | 1 + dev-local/.env.local.example | 2 + dev-local/README.md | 2 + dev-local/diffs | 0 dev-local/fetch-and-update-css.bash | 17 ++ dev-local/run-appropriate-stack.bash | 19 +- dev-local/start-local.sh | 15 +- dev-local/utility-functions.bash | 3 +- hhs_oauth_server/settings/base.py | 33 ++++ 14 files changed, 242 insertions(+), 116 deletions(-) create mode 100644 dev-local/diffs create mode 100644 dev-local/fetch-and-update-css.bash diff --git a/apps/authorization/models.py b/apps/authorization/models.py index ee6d5142c..e37d2adfd 100644 --- a/apps/authorization/models.py +++ b/apps/authorization/models.py @@ -93,10 +93,13 @@ def update_grants(*args, **kwargs): tokens = AccessToken.objects.all() for token in tokens: if token.is_valid(): - DataAccessGrant.objects.get_or_create( - beneficiary=token.user, - application=token.application, - ) + try: + DataAccessGrant.objects.get_or_create( + beneficiary=token.user, + application=token.application, + ) + except Exception as e: + print(f"update_grants: {e}") def check_grants(): diff --git 
a/apps/core/management/commands/create_test_feature_switches.py b/apps/core/management/commands/create_test_feature_switches.py index 5746a1d86..20013195a 100644 --- a/apps/core/management/commands/create_test_feature_switches.py +++ b/apps/core/management/commands/create_test_feature_switches.py @@ -34,7 +34,7 @@ def handle(self, *args, **options): for switch in WAFFLE_FEATURE_SWITCHES: try: Switch.objects.get(name=switch[0]) - self._log("Feature switch already exists: %s" % (str(switch))) + # self._log("Feature switch already exists: %s" % (str(switch))) except Switch.DoesNotExist: Switch.objects.create(name=switch[0], active=switch[1], note=switch[2]) self._log("Feature switch created: %s" % (str(switch))) @@ -46,7 +46,7 @@ def handle(self, *args, **options): try: flag_obj = Flag.objects.get(name=flag[0]) - self._log("Feature flag already exists: %s" % (str(flag_obj))) + # self._log("Feature flag already exists: %s" % (str(flag_obj))) except Flag.DoesNotExist: flag_obj = Flag.objects.create(name=flag[0]) self._log("Feature flag created: %s" % (str(flag[0]))) diff --git a/apps/dot_ext/tests/test_views.py b/apps/dot_ext/tests/test_views.py index 6171285bd..477f8efa4 100644 --- a/apps/dot_ext/tests/test_views.py +++ b/apps/dot_ext/tests/test_views.py @@ -24,6 +24,8 @@ SCOPES_TO_URL_BASE_PATH, ) +import os + from hhs_oauth_server.settings.base import MOCK_FHIR_ENDPOINT_HOSTNAME @@ -576,16 +578,15 @@ def test_delete_token_success(self): # This assertion is incorrectly crafted - it actually requires a local server started # so that the fhir fetch data is called and hence generate cert file not found error. - # TODO: refactor test to not depend on a server up and running. - - # Post Django 2.2: An OSError exception is expected when trying to reach the - # backend FHIR server and proves authentication worked. 
- with self.assertRaisesRegexp( - OSError, 'Could not find the TLS certificate file' - ): - response = self.client.get( - '/v1/fhir/Patient', headers={'authorization': 'Bearer ' + anna_token.token} - ) + # 20251120 This test is now gated on a variable; if the variable does not exist, or + # is not set, the test will run. This is the desired behavior. + if os.getenv("RUNNING_IN_LOCAL_STACK", None) != "true": + with self.assertRaisesRegexp( + OSError, 'Could not find the TLS certificate file' + ): + response = self.client.get( + '/v1/fhir/Patient', headers={'authorization': 'Bearer ' + anna_token.token} + ) bob_tkn = self._create_test_token(bob, bob_application) self.assertTrue( @@ -638,24 +639,26 @@ def test_delete_token_success(self): # Post Django 2.2: An OSError exception is expected when trying to reach the # backend FHIR server and proves authentication worked. - with self.assertRaisesRegexp( - OSError, 'Could not find the TLS certificate file' - ): - response = self.client.get( - '/v1/fhir/Patient', headers={'authorization': 'Bearer ' + bob_tkn.token} - ) + if os.getenv("RUNNING_IN_LOCAL_STACK", None) != "true": + with self.assertRaisesRegexp( + OSError, 'Could not find the TLS certificate file' + ): + response = self.client.get( + '/v1/fhir/Patient', headers={'authorization': 'Bearer ' + bob_tkn.token} + ) next_tkn = self._create_test_token(anna, anna_application) # Post Django 2.2: An OSError exception is expected when trying to reach the # backend FHIR server and proves authentication worked. 
- with self.assertRaisesRegexp( - OSError, 'Could not find the TLS certificate file' - ): - response = self.client.get( - '/v1/fhir/Patient', - headers={'authorization': 'Bearer ' + next_tkn.token}, - ) + if os.getenv("RUNNING_IN_LOCAL_STACK", None) != "true": + with self.assertRaisesRegexp( + OSError, 'Could not find the TLS certificate file' + ): + response = self.client.get( + '/v1/fhir/Patient', + headers={'authorization': 'Bearer ' + next_tkn.token}, + ) # self.assertEqual(next_tkn.token, tkn.token) self.assertTrue( diff --git a/apps/testclient/management/commands/create_test_user_and_application.py b/apps/testclient/management/commands/create_test_user_and_application.py index c32985bb0..f2e778973 100644 --- a/apps/testclient/management/commands/create_test_user_and_application.py +++ b/apps/testclient/management/commands/create_test_user_and_application.py @@ -12,6 +12,10 @@ from datetime import timedelta, datetime from django.conf import settings from apps.authorization.models import update_grants +from apps.authorization.models import ArchivedDataAccessGrant, DataAccessGrant + +# Imports for quieting things during startup. +from waffle.models import Switch def create_group(name="BlueButton"): @@ -24,57 +28,95 @@ def create_group(name="BlueButton"): return g +def get_switch(name): + try: + sw = Switch.objects.get(name=name) + return sw.active + except Exception as e: + print(f"Could not get switch {name}: {e}") + + +def set_switch(name, b): + # DISABLE SOME WAFFLE SWITCHES + # We don't want email, etc. 
+ sw, _ = Switch.objects.get_or_create(name=name) + sw.active = b + sw.save() + +# usr would be a string if it is anything + + def create_user(group, usr): - u_name = "fred" - first_name = "Fred" - last_name = "Flinstone" - email = "fred@example.com" - password = "foobarfoobarfoobar" + u_name = "rogersf" + first_name = "Fred" + last_name = "Rogers" + email = "fred@landofmakebelieve.gov" + password = "danielthetiger" user_type = "BEN" - + if usr is not None: u_name = usr - first_name = "{}{}".format(usr, "First") + first_name = "{}{}".format(usr, "First") last_name = "{}{}".format(usr, "Last") - email = "{}.{}@example.com".format(first_name, last_name) + email = "{}.{}@{}".format(first_name, last_name, email) user_type = "DEV" + # This violates constraints on other tables. + usr_q = User.objects.filter(username=u_name) + if usr_q.exists(): + # Delete any ADAGs for this user, or we will run into a + # constraint issue at startup. + count = ArchivedDataAccessGrant.objects.filter(beneficiary=usr_q.first()).delete() + print(f"Deleted {count} ADAGs for {u_name}") + count = DataAccessGrant.objects.filter(beneficiary=usr_q.first()).delete() + print(f"Deleted {count} ADAGs for {u_name}") - if User.objects.filter(username=u_name).exists(): User.objects.filter(username=u_name).delete() u = None if usr is not None: - u = User.objects.create_user(username=u_name, - first_name=first_name, - last_name=last_name, - email=email) - u.set_unusable_password() + try: + u, _ = User.objects.get_or_create(username=u_name, + first_name=first_name, + last_name=last_name, + email=email, + signals_to_disable=["post_save"]) + u.set_unusable_password() + except Exception as e: + print(f"Did not create user: {e}") else: # create a sample user 'fred' for dev local that has a usable password - u = User.objects.create_user(username=u_name, - first_name=first_name, - last_name=last_name, - email=email, - password=password,) - - UserProfile.objects.create(user=u, - user_type=user_type, - 
create_applications=True, - password_reset_question_1='1', - password_reset_answer_1='blue', - password_reset_question_2='2', - password_reset_answer_2='Frank', - password_reset_question_3='3', - password_reset_answer_3='Bentley') - - u.groups.add(group) - - if usr is None: - c, g_o_c = Crosswalk.objects.get_or_create(user=u, - fhir_id_v2=settings.DEFAULT_SAMPLE_FHIR_ID_V2, - _user_id_hash="ee78989d1d9ba0b98f3cfbd52479f10c7631679c17563186f70fbef038cc9536") + try: + # get_or_create returns a tuple (v, bool) + u, _ = User.objects.get_or_create(username=u_name, + first_name=first_name, + last_name=last_name, + email=email, + password=password,) + + UserProfile.objects.create(user=u, + user_type=user_type, + create_applications=True, + password_reset_question_1='1', + password_reset_answer_1='blue', + password_reset_question_2='2', + password_reset_answer_2='Frank', + password_reset_question_3='3', + password_reset_answer_3='Bentley') + except Exception as e: + print(f"Did not create user and profile: {e}") + + if u is None: + print(f"Error creating user; exiting.") + else: + u.groups.add(group) + + user_id_hash = "ee78989d1d9ba0b98f3cfbd52479f10c7631679c17563186f70fbef038cc9536" + Crosswalk.objects.filter(_user_id_hash=user_id_hash).delete() + c, _ = Crosswalk.objects.get_or_create(user=u, + fhir_id_v2=settings.DEFAULT_SAMPLE_FHIR_ID_V2, + _user_id_hash=user_id_hash) return u @@ -86,26 +128,29 @@ def create_application(user, group, app, redirect): if redirect: redirect_uri = redirect - if not(redirect_uri.startswith("http://") or redirect_uri.startswith("https://")): + if not (redirect_uri.startswith("http://") or redirect_uri.startswith("https://")): redirect_uri = "https://" + redirect_uri - a = Application.objects.create(name=app_name, - redirect_uris=redirect_uri, - user=user, - data_access_type="THIRTEEN_MONTH", - client_type="confidential", - authorization_grant_type="authorization-code") + try: + a = Application.objects.create(name=app_name, + 
redirect_uris=redirect_uri, + user=user, + data_access_type="THIRTEEN_MONTH", + client_type="confidential", + authorization_grant_type="authorization-code",) - titles = ["My Medicare and supplemental coverage information.", - "My Medicare claim information.", - "My general patient and demographic information.", - "Profile information including name and email." - ] + titles = ["My Medicare and supplemental coverage information.", + "My Medicare claim information.", + "My general patient and demographic information.", + "Profile information including name and email." + ] - for t in titles: - c = ProtectedCapability.objects.get(title=t) - a.scope.add(c) - return a + for t in titles: + c = ProtectedCapability.objects.get(title=t) + a.scope.add(c) + return a + except Exception as e: + print(f"Skipped creation of {app_name}: {e}") def create_test_token(user, application): @@ -121,7 +166,8 @@ def create_test_token(user, application): t = AccessToken.objects.create(user=user, application=application, token="sample-token-string", expires=expires, - scope=' '.join(scope)) + scope=' '.join(scope),) + return t @@ -134,12 +180,15 @@ def add_arguments(self, parser): parser.add_argument("-r", "--redirect", help="Redirect url of the application.") def handle(self, *args, **options): - usr = options["user"] - app = options["app"] + usr = options.get("user", None) + app = options.get("app", None) redirect = options["redirect"] + set_switch('outreach_email', False) + g = create_group() u = create_user(g, usr) + print(f"Created user {u}") a = create_application(u, g, app, redirect) t = None if usr is None and app is None: @@ -150,3 +199,6 @@ def handle(self, *args, **options): print("client_secret:", a.client_secret) print("access_token:", t.token if t else "None") print("redirect_uri:", a.redirect_uris) + + # Restore switch to whatever it was. 
+ set_switch('outreach_email', True) diff --git a/apps/testclient/management/commands/create_test_users_and_applications_batch.py b/apps/testclient/management/commands/create_test_users_and_applications_batch.py index 0b3b534d3..26bb590f9 100755 --- a/apps/testclient/management/commands/create_test_users_and_applications_batch.py +++ b/apps/testclient/management/commands/create_test_users_and_applications_batch.py @@ -49,6 +49,8 @@ def create_group(name="BlueButton"): return g # To avoid naming collisions when running this command more than once + + def get_first_available_number(firstname): try: latest = User.objects.filter( @@ -59,6 +61,7 @@ def get_first_available_number(firstname): begin = ''.join(x for x in latest.first_name if x.isdigit()) return int(begin) + 1 + def create_dev_users_apps_and_bene_crosswalks( group, bene_count, @@ -171,7 +174,7 @@ def create_dev_users_apps_and_bene_crosswalks( app_index += 1 app_name = "app{}_{}".format(i, u) redirect_uri = "{}/testclient_{}/callback".format(settings.HOSTNAME_URL, app_name) - if not(redirect_uri.startswith("http://") or redirect_uri.startswith("https://")): + if not (redirect_uri.startswith("http://") or redirect_uri.startswith("https://")): redirect_uri = "https://" + redirect_uri # 2% inactive, 5% opt out demo scopes # 10% public/implicit 90% confidential/authorization-code @@ -278,7 +281,7 @@ def create_test_access_refresh_archived_objects( for i in range(refresh_count): rt = RefreshToken.objects.create(user=user, application=application, - token=uuid.uuid4().hex) + token=uuid.uuid4().hex) rt.created = at.created rt.save() print("<<< " + user.username + " refresh token " + str(i) + " generated") @@ -286,13 +289,13 @@ def create_test_access_refresh_archived_objects( # archived token: created, updated, archived_at datetime fields for i in range(archived_token_count): ot = ArchivedToken.objects.create(user=user, - application=application, - token=uuid.uuid4().hex, - expires=expires.replace(tzinfo=pytz.utc), - 
created=at.created, - updated=at.created, - archived_at=at.created, - scope=scope) + application=application, + token=uuid.uuid4().hex, + expires=expires.replace(tzinfo=pytz.utc), + created=at.created, + updated=at.created, + archived_at=at.created, + scope=scope) date_archived = ot.created + timedelta(days=10) ot.archived_at = date_archived.replace(tzinfo=pytz.utc) @@ -301,14 +304,17 @@ def create_test_access_refresh_archived_objects( past_date = timezone.now() - timedelta(days=2) for i in range(archived_grant_count): - adag = ArchivedDataAccessGrant.objects.create(beneficiary=user, - application=application, - expiration_date=past_date, - created_at=past_date - timedelta(days=2), - archived_at=past_date) - past_date = past_date - timedelta(days=2) - adag.save() - print("<<< " + user.username + "archived grant " + str(i) + " generated") + try: + adag = ArchivedDataAccessGrant.objects.create(beneficiary=user, + application=application, + expiration_date=past_date, + created_at=past_date - timedelta(days=2), + archived_at=past_date) + past_date = past_date - timedelta(days=2) + adag.save() + print("<<< " + user.username + "archived grant " + str(i) + " generated") + except Exception as e: + print(f"Skipped creating grant number {i} due to DB conflict: {e}") class Command(BaseCommand): diff --git a/dev-local/.env.container b/dev-local/.env.container index 90bc776a4..d4cce118a 100644 --- a/dev-local/.env.container +++ b/dev-local/.env.container @@ -47,6 +47,7 @@ POSTGRES_DB="${POSTGRES_DB}" POSTGRES_PASSWORD="${POSTGRES_PASSWORD}" POSTGRES_PORT="${POSTGRES_PORT}" RUN_ONLINE_TESTS="${RUN_ONLINE_TESTS}" +RUNNING_IN_LOCAL_STACK="${RUNNING_IN_LOCAL_STACK}" SUPER_USER_EMAIL="${SUPER_USER_EMAIL}" SUPER_USER_NAME="${SUPER_USER_NAME}" SUPER_USER_PASSWORD="${SUPER_USER_PASSWORD}" diff --git a/dev-local/.env.local.example b/dev-local/.env.local.example index 5b17cb3fe..9a4f82515 100644 --- a/dev-local/.env.local.example +++ b/dev-local/.env.local.example @@ -2,6 +2,8 @@ # 
DEVELOPER VARIABLES # You may want to tweak these any given day of the week. +RUNNING_IN_LOCAL_STACK=true + ## enable debugpy remote debugging (on port 5678) # 20251113 MCJ This clearly works, but it does not seem to be used anywhere # that would actually affect application behavior. :confused: diff --git a/dev-local/README.md b/dev-local/README.md index 33f536508..6afad3043 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -16,6 +16,8 @@ A Mac, out-of-the-box, should "just work," as well as an Intel-based Linux host. ### configuring `kion` +*To run the tools, you must be in a `kion` shell. What follows is a way to set up an alias that makes running the correct configuration easier.* + You should already have a `.kion.yaml` in your home directory. If not, follow the [local desktop development](https://confluence.cms.gov/spaces/BB2/pages/484224999/Local+Desktop+Development) onboarding docs to set up Cloudtamer/`kion`. You need to add an alias in the previously mentioned .kion.yaml for this tooling to work. diff --git a/dev-local/diffs b/dev-local/diffs new file mode 100644 index 000000000..e69de29bb diff --git a/dev-local/fetch-and-update-css.bash b/dev-local/fetch-and-update-css.bash new file mode 100644 index 000000000..95b556fb1 --- /dev/null +++ b/dev-local/fetch-and-update-css.bash @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +fetch_and_update_css () { + if [ ! -d '../bluebutton-css' ] + then + pushd .. + git clone https://github.com/CMSgov/bluebutton-css.git + popd + else + pushd ../bluebutton-css; + git fetch --all + git pull --all + popd + + echo '🆗 CSS already installed. Fetched/pulled.' + fi +} diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index aaef2c374..294151b33 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -1,5 +1,6 @@ #!/usr/bin/env bash source ./utility-functions.bash +source ./fetch-and-update-css.bash # this says to "export all variables." 
set -a @@ -9,6 +10,8 @@ set -e # bfd = local | test | sbx # auth = mock | live +fetch_and_update_css + # let's make sure we have a valid ENV var before proceeding check_valid_env @@ -48,9 +51,15 @@ fi cleanup_docker_stack -echo "📊 Vernier start." -echo - -docker compose \ +if [[ "${daemon}" == "1" ]]; then + docker compose \ -f docker-compose-local.yaml \ - up \ No newline at end of file + up \ + --detach +else + echo "📊 Tailing logs." + echo + docker compose \ + -f docker-compose-local.yaml \ + up +fi diff --git a/dev-local/start-local.sh b/dev-local/start-local.sh index a60cf4cee..f8b7cc1c7 100755 --- a/dev-local/start-local.sh +++ b/dev-local/start-local.sh @@ -1,7 +1,9 @@ #!/usr/bin/env bash +set -e set -a + if [ "${DB_MIGRATIONS}" = "true" ] then echo "🔵 running migrations" @@ -18,6 +20,9 @@ then else echo "🆗 ${SUPER_USER_NAME} already exists." fi + + python manage.py create_test_feature_switches + echo "🆗 create_test_feature_switches" python manage.py create_admin_groups echo "🆗 create_admin_groups" @@ -29,24 +34,16 @@ then echo "🆗 create_blue_button_scopes" python manage.py create_test_user_and_application + echo "🆗 create_test_user_and_application" python manage.py create_user_identification_label_selection echo "🆗 create_user_identification_label_selection" - python manage.py create_test_feature_switches - echo "🆗 create_test_feature_switches" else echo "restarting blue button server, no db image migration and models initialization will run here, you might need to manually run DB image migrations." fi -if [ ! -d 'bluebutton-css' ] -then - git clone https://github.com/CMSgov/bluebutton-css.git -else - echo 'CSS already installed.' 
-fi - if [ "${BB20_ENABLE_REMOTE_DEBUG}" = true ] then if [ "${BB20_REMOTE_DEBUG_WAIT_ATTACH}" = true ] diff --git a/dev-local/utility-functions.bash b/dev-local/utility-functions.bash index 201443620..e44d95760 100755 --- a/dev-local/utility-functions.bash +++ b/dev-local/utility-functions.bash @@ -234,9 +234,10 @@ set_salt () { DJANGO_SLSX_TOKEN_ENDPOINT="http://msls:8080/sso/session" DJANGO_SLSX_SIGNOUT_ENDPOINT="http://msls:8080/sso/signout" DJANGO_SLSX_USERINFO_ENDPOINT="http://msls:8080/v1/users" - + DJANGO_SLSX_CLIENT_ID=bb2api DJANGO_SLSX_CLIENT_SECRET="xxxxx" + DJANGO_PASSWORD_HASH_ITERATIONS="200000" DJANGO_SLSX_VERIFY_SSL_INTERNAL="False" diff --git a/hhs_oauth_server/settings/base.py b/hhs_oauth_server/settings/base.py index 8720a15ea..aaeb84b0c 100644 --- a/hhs_oauth_server/settings/base.py +++ b/hhs_oauth_server/settings/base.py @@ -11,6 +11,39 @@ from django.utils.translation import gettext_lazy as _ from .themes import THEMES, THEME_SELECTED +# SUPPRESSING WARNINGS TO QUIET THE LAUNCH PROCESS +# We want the launch to generally be quiet, and only tell us things +# that worked, or announce genuine errors. +# We currently have around 6 warnings on URL endpoints. +# +# https://stackoverflow.com/questions/41449814/django-url-warning-urls-w002 +# We can either use APPEND_SLASH or SILENCE_SYSTEM_CHECKS to quiet some warnings +# around trailing slashes in URLs. There is no risk/danger/problem with having +# them---Django is just opinionated. +# +# By using the SILENCE_SYSTEM_CHECKS, we just suppress warnings like +# +# ?: (urls.W002) Your URL pattern '/bfd/?$' has a route beginning with a '/'. +# Remove this slash as it is unnecessary. If this pattern is targeted in an +# include(), ensure the include() pattern has a trailing '/'. +SILENCED_SYSTEM_CHECKS = ['urls.W002'] +# +# If we use APPEND_SLASH, it also suppresses the warnings, but it also +# changes Django's behavior. For example, +# +# localhost:8000/admin +# +# no longer works. 
You MUST then use +# +# localhost:8000/admin/ +# +# Because this changes behavior, we should either +# +# 1. Update our URL pattern rules, or +# 2. Suppress the warnings, as they do not represent a security issue +# +# But should not change app behavior unless we test that thoroughly. +# APPEND_SLASH = False # project root folder BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) From 962291a014de5bf9fa93c2a609a80cce4cd60ebe Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Thu, 20 Nov 2025 09:39:11 -0500 Subject: [PATCH 10/14] Removing script, out of scope --- dev-local/diffs | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 dev-local/diffs diff --git a/dev-local/diffs b/dev-local/diffs deleted file mode 100644 index e69de29bb..000000000 From 5a9d396f14619aacc5ab9996eef26d7a90c4ee5b Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Thu, 20 Nov 2025 12:19:59 -0500 Subject: [PATCH 11/14] Incremental/bug fixes. --- dev-local/.env.container | 1 + dev-local/Dockerfile.local | 4 -- dev-local/README.md | 44 ++++++++++++------- dev-local/docker-compose-local.yaml | 31 +------------ dev-local/run-tests-in-both-environments.bash | 4 -- dev-local/utility-functions.bash | 32 ++++++++++++-- 6 files changed, 60 insertions(+), 56 deletions(-) delete mode 100644 dev-local/run-tests-in-both-environments.bash diff --git a/dev-local/.env.container b/dev-local/.env.container index d4cce118a..a6f26fbad 100644 --- a/dev-local/.env.container +++ b/dev-local/.env.container @@ -51,3 +51,4 @@ RUNNING_IN_LOCAL_STACK="${RUNNING_IN_LOCAL_STACK}" SUPER_USER_EMAIL="${SUPER_USER_EMAIL}" SUPER_USER_NAME="${SUPER_USER_NAME}" SUPER_USER_PASSWORD="${SUPER_USER_PASSWORD}" +LOCAL_TESTING_TARGET="${LOCAL_TESTING_TARGET}" \ No newline at end of file diff --git a/dev-local/Dockerfile.local b/dev-local/Dockerfile.local index cbc282660..bb34f457a 100644 --- a/dev-local/Dockerfile.local +++ b/dev-local/Dockerfile.local @@ -1,7 +1,5 @@ -# FIXME: Update to most recent 
python. FROM python:3.11-trixie -# FIXME: Uncertain implications ENV PYTHONUNBUFFERED=1 ENV PYDEVD_DISABLE_FILE_VALIDATION=1 @@ -24,8 +22,6 @@ USER DEV ADD .. /code WORKDIR /code -# FIXME: Is this how to use venvs -# reliably in a Dockerfile? RUN python -m venv /tmp/venv RUN . /tmp/venv/bin/activate ENV PATH="/tmp/venv/bin:${PATH}" diff --git a/dev-local/README.md b/dev-local/README.md index 6afad3043..dee18d38f 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -8,6 +8,10 @@ The containerized local build should provide a local development experience that These tools assume you are a developer working on the project, and have access to the VPN and other systems. +## TL;DR + + + ## pre-requisites It is assumed you have a *NIX-like shell, and have the ability to run GNU Make or a reasonable fascimilie thereof. @@ -16,7 +20,7 @@ A Mac, out-of-the-box, should "just work," as well as an Intel-based Linux host. ### configuring `kion` -*To run the tools, you must be in a `kion` shell. What follows is a way to set up an alias that makes running the correct configuration easier.* +*To run the tools, you must be in a `kion` shell. What follows is a way to set up an alias that makes running the correct configuration easier. You can also run `kion stak` or `kion s` and navigate the menus to achieve a similar result.* You should already have a `.kion.yaml` in your home directory. If not, follow the [local desktop development](https://confluence.cms.gov/spaces/BB2/pages/484224999/Local+Desktop+Development) onboarding docs to set up Cloudtamer/`kion`. @@ -31,7 +35,7 @@ Then add the alias as: ``` favorites: - - name: BB2-NON-PROD + - name: bbnp account: cloud_access_role: access_type: cli @@ -40,10 +44,10 @@ favorites: If you already have some aliases, you can just add this one to the list. The account number and cloud access role can be obtained from the Cloudtamer dashboard. 
This is not strictly necessary, as the tooling cannot automate a `kion` call *and* then continue, as `kion` opens a new shell. However, it is much easier (and, for this documentation, the preferred method) to invoke ``` -kion f BB2-NON-PROD +kion f bbnp ``` -than to navigate a menu structure. You may ultimately choose a shorter alias, e.g. `kion f bnp`. +than to navigate a menu structure. You may ultimately choose a shorter alias, e.g. `kion f bbnp`. ## to start @@ -93,26 +97,36 @@ make run-local bfd=test auth=live Each combination has different implications. Only some make sense at this time (Nov '25): -| bfd / auth | mock | live | +| | auth=mock | auth=live | | --- | --- | --- | -| local | local unit tests | testing SLSX sequences | -| test | ⛔ | Full-up against `test` | -| sbx | ⛔ | Full-up against `sbx` | +| **bfd=local** | local unit tests | testing SLSX sequences | +| **bfd=test** | ⛔ | Full-up against `test` | +| **bfd=sbx** | ⛔ | Full-up against `sbx` | * `local/mock`: This makes sense for running unit tests; only local tests will run in this configuration. -* `local/live`: Manual testing of SLSX sequences should be able to be performed with this combination (TBD) -* `test/mock`: Not a valid condition; a mock authentication will not work with a live server. +* `local/live`: Manual testing of SLSX sequences should be able to be performed with this combination. No BFD/FHIR URLs are set, though, which may break things. +* `test/mock`: *Not a valid condition*; a mock authentication will not work with a live server. * `test/live`: Live SLSX exchanges with medicare.gov and calls against the `test` BFD environment. -* `sbx/mock`: Not a valid condition. +* `sbx/mock`: *Not a valid condition*. * `sbx/live`: Live SLSX exchanges and calls against the `sbx` BFD environment. +### running daemonized + +You cann add `daemon=1` to any of the above commands, and the stack will run in the background. 
+ +For example: + +``` +make run-local bfd=test auth=live daemon=1 +``` + ### running against `test` When launched with ``` -make run-local TARGET=test +make run-local bfd=test auth=live ``` the tooling obtains and sources credentials for running against our `test` environment. @@ -122,7 +136,7 @@ the tooling obtains and sources credentials for running against our `test` envir Similarly, ``` -make run-local TARGET=sbx +make run-local bfd=sbx auth=live ``` runs against SBX. @@ -135,9 +149,9 @@ runs against SBX. In a nutshell: -1. Run `kion f BB2-NON-PROD` to authenticate/obtain AWS credentials. +1. Run `kion f bbnp` to authenticate/obtain AWS credentials. 1. Obtain certificates for the remote environment (if you selected `test` or `sbx`) -1. `docker compose --profile mock-sls up` for `local`, `docker compose --profile slsx up` for live envs. +1. Pass appropriate env vars through to the app, based on choices. ## future work diff --git a/dev-local/docker-compose-local.yaml b/dev-local/docker-compose-local.yaml index ef1fe1a33..4cd5a246a 100644 --- a/dev-local/docker-compose-local.yaml +++ b/dev-local/docker-compose-local.yaml @@ -36,33 +36,4 @@ services: - "5678:5678" depends_on: - db - - msls - - # web_msls: - # build: . - # command: ./start-local.sh - # env_file: - # - .env.local - # volumes: - # - ..:/code - # - ~/.bb2/certstore:/certstore - # ports: - # - "8000:8000" - # - "5678:5678" - # depends_on: - # - db - # - msls - # profiles: - # - mocksls - # platform: linux/amd64 - # unittests: - # build: . 
- # command: python3 -m debugpy --listen 0.0.0.0:6789 --wait-for-client runtests.py - # env_file: - # - docker-compose/unittests-env-vars.env - # ports: - # - "6789:6789" - # volumes: - # - .:/code - # profiles: - # - tests + - msls \ No newline at end of file diff --git a/dev-local/run-tests-in-both-environments.bash b/dev-local/run-tests-in-both-environments.bash deleted file mode 100644 index b13bf069a..000000000 --- a/dev-local/run-tests-in-both-environments.bash +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash - -export ENV=test -docker compose up --profile slsx \ No newline at end of file diff --git a/dev-local/utility-functions.bash b/dev-local/utility-functions.bash index e44d95760..bee2a1b14 100755 --- a/dev-local/utility-functions.bash +++ b/dev-local/utility-functions.bash @@ -19,12 +19,27 @@ check_valid_env () { ##### # ERR else - echo "'bfd' must be set to 'local', 'test', or 'sbx'." - echo "'bfd' is currently set to '${bfd}'." + echo "⛔ 'bfd' must be set to 'local', 'test', or 'sbx'." + echo "⛔ 'bfd' is currently set to '${bfd}'." echo "Exiting." return -2 fi + + if [[ "${bfd}" == "local" && "${auth}" == "live" ]]; then + echo "⚠️ ${bfd}/${auth} may work for SLSX testing, but not for BFD calls." + fi + + if [[ "${bfd}" == "test" && "${auth}" == "mock" ]]; then + echo "⛔ ${bfd}/${auth} is not a valid combination. Exiting." + return -3 + fi + + if [[ "${bfd}" == "sbx" && "${auth}" == "mock" ]]; then + echo "⛔ ${bfd}/${auth} is not a valid combination. Exiting." + return -4 + fi + echo "✅ check_valid_env" } @@ -96,16 +111,20 @@ set_bfd_urls () { if [[ "${bfd}" == "local" ]]; then echo "⚠️ No FHIR URLs set for local testing." echo " There are no mock BFD endpoints for local testing at this time." 
+ export LOCAL_TESTING_TARGET="local" ##### # TEST elif [[ "${bfd}" == "test" ]]; then export FHIR_URL="${FHIR_URL_TEST}" export FHIR_URL_V3="${FHIR_URL_V3_TEST}" + export LOCAL_TESTING_TARGET="test" ##### # SBX elif [[ "${bfd}" == "sbx" ]]; then export FHIR_URL="${FHIR_URL_SBX}" export FHIR_URL_V3="${FHIR_URL_V3_SBX}" + # FIXME: Do we use "impl" or "sbx"? ... + export LOCAL_TESTING_TARGET="impl" fi echo "✅ set_bfd_urls" @@ -143,6 +162,10 @@ set_auth_profile () { export CERT_AND_SALT="YES" retrieve_certs () { + echo "🎁 Retrieving certs for the '${bfd}' environment with suffix '${CERT_SUFFIX}'." + + unset CERT_SUFFIX + if [[ "${bfd}" == "local" ]]; then echo "🆗 Running locally. Not retrieving certs." echo "🆗 Running locally. Not retrieving salt." @@ -173,10 +196,12 @@ retrieve_certs () { KEY="ca.key.nocrypt.pem" # Remove them first + echo " Removing ${BB2_CERTSTORE}/$CERT" rm -f "${BB2_CERTSTORE}/$CERT" + echo " Removing ${BB2_CERTSTORE}/$KEY" rm -f "${BB2_CERTSTORE}/$KEY" - echo "🎁 Retrieving certs for the '${bfd}' environment with suffix '${CERT_SUFFIX}'." + echo " Fetching ${BB2_CERTSTORE}/$CERT" aws secretsmanager get-secret-value \ --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_certificate${CERT_SUFFIX} \ --query 'SecretString' \ @@ -187,6 +212,7 @@ retrieve_certs () { return -3 fi + echo " Fetching ${BB2_CERTSTORE}/$KEY" aws secretsmanager get-secret-value \ --secret-id /bb2/local_integration_tests/fhir_client/certstore/local_integration_tests_private_key${CERT_SUFFIX} \ --query 'SecretString' \ From 9f3dea78e76f31c5fb18d6417cf229d50363e93e Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Fri, 21 Nov 2025 09:46:33 -0500 Subject: [PATCH 12/14] Fixes. 
--- apps/authorization/views.py | 2 +- .../commands/create_test_feature_switches.py | 2 +- .../tests/test_wellknown_endpoints.py | 6 +- .../create_test_user_and_application.py | 227 +++++++----------- apps/testclient/tests.py | 39 +-- dev-local/README.md | 15 ++ dev-local/run-appropriate-stack.bash | 2 +- dev-local/utility-functions.bash | 3 +- 8 files changed, 141 insertions(+), 155 deletions(-) diff --git a/apps/authorization/views.py b/apps/authorization/views.py index 2f5a6dcd3..89b1de330 100644 --- a/apps/authorization/views.py +++ b/apps/authorization/views.py @@ -31,7 +31,7 @@ class Meta: fields = ('id', 'name', 'logo_uri', 'tos_uri', 'policy_uri', 'contacts') def get_contacts(self, obj): - print(obj) + # print(obj) application = Application.objects.get(id=obj.id) return application.support_email or "" diff --git a/apps/core/management/commands/create_test_feature_switches.py b/apps/core/management/commands/create_test_feature_switches.py index 20013195a..b748c0233 100644 --- a/apps/core/management/commands/create_test_feature_switches.py +++ b/apps/core/management/commands/create_test_feature_switches.py @@ -62,7 +62,7 @@ def handle(self, *args, **options): flag_obj.save() self._log("User {} added to feature flag: {}".format(u, flag)) except Exception as e: - print(e) + # print(e) self._log("Exception when adding user {} to feature flag: {}".format(u, flag)) except User.DoesNotExist: # assuming test users exist before creating flags associated with them diff --git a/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py b/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py index 5611931ef..241d6aebe 100644 --- a/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py +++ b/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py @@ -147,11 +147,11 @@ def test_smart_configuration_missing_fields_in_v3(self): # is commented above for reference. 
@skipIf((not settings.RUN_ONLINE_TESTS), "Can't reach external sites.") - # This overrides the switch and sets it to true, always. - # We should only run the test if we have v3 enabled. @override_switch('v3_endpoints', active=True) def test_fhir_metadata_extensions_have_v3(self): - response = self.client.get(f'{BASEURL}/v3/fhir/metadata') + the_url = f'{BASEURL}/v3/fhir/metadata' + print(the_url) + response = self.client.get(the_url) self.assertEqual(response.status_code, 200) json = response.json() self.assertIn('v3', json['implementation']['url']) diff --git a/apps/testclient/management/commands/create_test_user_and_application.py b/apps/testclient/management/commands/create_test_user_and_application.py index f2e778973..9a6e91056 100644 --- a/apps/testclient/management/commands/create_test_user_and_application.py +++ b/apps/testclient/management/commands/create_test_user_and_application.py @@ -17,9 +17,10 @@ # Imports for quieting things during startup. from waffle.models import Switch +from uuid import uuid4 -def create_group(name="BlueButton"): +def create_group(name="BlueButton"): g, created = Group.objects.get_or_create(name=name) if created: print("%s group created" % (name)) @@ -27,178 +28,136 @@ def create_group(name="BlueButton"): print("%s group pre-existing. Create skipped." % (name)) return g - -def get_switch(name): - try: - sw = Switch.objects.get(name=name) - return sw.active - except Exception as e: - print(f"Could not get switch {name}: {e}") - - -def set_switch(name, b): - # DISABLE SOME WAFFLE SWITCHES - # We don't want email, etc. 
- sw, _ = Switch.objects.get_or_create(name=name) - sw.active = b - sw.save() - # usr would be a string if it is anything -def create_user(group, usr): - u_name = "rogersf" +def create_user(the_group): + username = "rogersf" first_name = "Fred" last_name = "Rogers" - email = "fred@landofmakebelieve.gov" - password = "danielthetiger" + email = "mrrogers@landofmakebelieve.gov" + password = uuid4() user_type = "BEN" - if usr is not None: - u_name = usr - first_name = "{}{}".format(usr, "First") - last_name = "{}{}".format(usr, "Last") - email = "{}.{}@{}".format(first_name, last_name, email) - user_type = "DEV" - - # This violates constraints on other tables. - usr_q = User.objects.filter(username=u_name) - if usr_q.exists(): - # Delete any ADAGs for this user, or we will run into a - # constraint issue at startup. - count = ArchivedDataAccessGrant.objects.filter(beneficiary=usr_q.first()).delete() - print(f"Deleted {count} ADAGs for {u_name}") - count = DataAccessGrant.objects.filter(beneficiary=usr_q.first()).delete() - print(f"Deleted {count} ADAGs for {u_name}") - - User.objects.filter(username=u_name).delete() - - u = None - - if usr is not None: - try: - u, _ = User.objects.get_or_create(username=u_name, - first_name=first_name, - last_name=last_name, - email=email, - signals_to_disable=["post_save"]) - u.set_unusable_password() - except Exception as e: - print(f"Did not create user: {e}") - else: - # create a sample user 'fred' for dev local that has a usable password - try: - # get_or_create returns a tuple (v, bool) - u, _ = User.objects.get_or_create(username=u_name, - first_name=first_name, - last_name=last_name, - email=email, - password=password,) - - UserProfile.objects.create(user=u, - user_type=user_type, - create_applications=True, - password_reset_question_1='1', - password_reset_answer_1='blue', - password_reset_question_2='2', - password_reset_answer_2='Frank', - password_reset_question_3='3', - password_reset_answer_3='Bentley') - except Exception 
as e: - print(f"Did not create user and profile: {e}") - - if u is None: - print(f"Error creating user; exiting.") - else: - u.groups.add(group) - + # We will do this over-and-over. + # If we don't already exist, then create the user. + if User.objects.filter(username=username).exists(): + print(f"👟 {username} already exists. Skipping test user creation.") + return User.objects.get(username=username) + + # If the user didn't exist, it is our first time through. + # Create the user. + user_obj = User.objects.create(username=username, + first_name=first_name, + last_name=last_name, + email=email, + password=password,) + user_obj.set_unusable_password() + UserProfile.objects.create(user=user_obj, + user_type=user_type, + create_applications=True, + password_reset_question_1='1', + password_reset_answer_1='blue', + password_reset_question_2='2', + password_reset_answer_2='Frank', + password_reset_question_3='3', + password_reset_answer_3='Bentley') + user_obj.groups.add(the_group) + + # CROSSWALK + # Removing any existing crosswalks for this artificial user. + # Why? Just in case. user_id_hash = "ee78989d1d9ba0b98f3cfbd52479f10c7631679c17563186f70fbef038cc9536" Crosswalk.objects.filter(_user_id_hash=user_id_hash).delete() - c, _ = Crosswalk.objects.get_or_create(user=u, - fhir_id_v2=settings.DEFAULT_SAMPLE_FHIR_ID_V2, - _user_id_hash=user_id_hash) - return u + Crosswalk.objects.get_or_create(user=user_obj, + fhir_id_v2=settings.DEFAULT_SAMPLE_FHIR_ID_V2, + _user_id_hash=user_id_hash) + return user_obj + +def create_application(user): + app_name = "TestApp" + if Application.objects.filter(name=app_name).exists(): + return Application.objects.get(name=app_name) + + # If the app doesn't exist, create the test app. 
-def create_application(user, group, app, redirect): - app_name = "TestApp" if app is None else app Application.objects.filter(name=app_name).delete() redirect_uri = "{}{}".format(settings.HOSTNAME_URL, settings.TESTCLIENT_REDIRECT_URI) - if redirect: - redirect_uri = redirect + the_app = Application.objects.create(name=app_name, + redirect_uris=redirect_uri, + user=user, + data_access_type="THIRTEEN_MONTH", + client_type="confidential", + authorization_grant_type="authorization-code",) - if not (redirect_uri.startswith("http://") or redirect_uri.startswith("https://")): - redirect_uri = "https://" + redirect_uri + titles = ["My Medicare and supplemental coverage information.", + "My Medicare claim information.", + "My general patient and demographic information.", + "Profile information including name and email." + ] - try: - a = Application.objects.create(name=app_name, - redirect_uris=redirect_uri, - user=user, - data_access_type="THIRTEEN_MONTH", - client_type="confidential", - authorization_grant_type="authorization-code",) - - titles = ["My Medicare and supplemental coverage information.", - "My Medicare claim information.", - "My general patient and demographic information.", - "Profile information including name and email." - ] - - for t in titles: - c = ProtectedCapability.objects.get(title=t) - a.scope.add(c) - return a - except Exception as e: - print(f"Skipped creation of {app_name}: {e}") + for t in titles: + c = ProtectedCapability.objects.get(title=t) + the_app.scope.add(c) + return the_app -def create_test_token(user, application): +def create_test_token(the_user, the_app): + + # Set expiration one day from now. 
now = timezone.now() expires = now + timedelta(days=1) - scopes = application.scope.all() + scopes = the_app.scope.all() scope = [] for s in scopes: scope.append(s.slug) - t = AccessToken.objects.create(user=user, application=application, + # We have to have a tokent with token="sample-token-string", because we + # rely on it existing for unit tests. Which are actually integration tests. + if AccessToken.objects.filter(token="sample-token-string").exists(): + t = AccessToken.objects.get(token="sample-token-string") + t.expires = expires + t.save() + else: + AccessToken.objects.create(user=the_user, + application=the_app, + # This needs to be "sample-token-string", because + # we have tests that rely on it. token="sample-token-string", expires=expires, scope=' '.join(scope),) - return t + +def get_switch(name): + try: + sw = Switch.objects.get(name=name) + return sw.active + except Exception as e: + print(f"Could not get switch {name}: {e}") + + +def set_switch(name, b): + sw, _ = Switch.objects.get_or_create(name=name) + sw.active = b + sw.save() class Command(BaseCommand): help = 'Create a test user and application for the test client' - def add_arguments(self, parser): - parser.add_argument("-u", "--user", help="Name of the user to be created (unique).") - parser.add_argument("-a", "--app", help="Name of the application to be created (unique).") - parser.add_argument("-r", "--redirect", help="Redirect url of the application.") - def handle(self, *args, **options): - usr = options.get("user", None) - app = options.get("app", None) - redirect = options["redirect"] set_switch('outreach_email', False) - g = create_group() - u = create_user(g, usr) - print(f"Created user {u}") - a = create_application(u, g, app, redirect) - t = None - if usr is None and app is None: - t = create_test_token(u, a) - update_grants() - print("Name:", a.name) - print("client_id:", a.client_id) - print("client_secret:", a.client_secret) - print("access_token:", t.token if t else "None") - 
print("redirect_uri:", a.redirect_uris) + the_group = create_group() + the_user = create_user(the_group) + the_app = create_application(the_user) + create_test_token(the_user, the_app) + update_grants() # Restore switch to whatever it was. set_switch('outreach_email', True) diff --git a/apps/testclient/tests.py b/apps/testclient/tests.py index beeb07ca7..fd0546e42 100644 --- a/apps/testclient/tests.py +++ b/apps/testclient/tests.py @@ -11,6 +11,8 @@ from apps.testclient.views import FhirDataParams, _build_pagination_uri from django.http import HttpRequest +import os + class TestclientHelpers(TestCase): def test_ormap(self): @@ -307,15 +309,15 @@ def test_offset_math(self): response = self.client.get(uri) response_data = response.json() self.assertEqual(response.status_code, 200) - # self.assertEqual(response_data["total"], 32) - # 20251022 MCJ - # For some reason, this no longer passes when asserted equal to 7. - # I do not know what data we test against, if it is consistent, etc. - # I have updated the test to `5`, and it passes. If the data is potentially variable/not in - # our control, then these unit tests will always be suspect (including offsets and pagination values). - # This seems to have been the case 7mo ago with the "total" test, above. - # self.assertEqual(len(response_data["entry"]), 7) - self.assertEqual(len(response_data["entry"]), 5) + + # Different environments have different data in them. + # If we are testing against sandbox, we expect fewer responses. 
+ + if os.getenv("LOCAL_TESTING_TARGET", None) in ["impl"]: + self.assertEqual(len(response_data["entry"]), 12) + else: + self.assertEqual(len(response_data["entry"]), 5) + previous_links = [ data["url"] for data in response_data["link"] @@ -327,11 +329,20 @@ def test_offset_math(self): first_links = [ data["url"] for data in response_data["link"] if data["relation"] == "first" ] - self.assertEqual(len(previous_links), 1) - self.assertEqual(len(next_links), 0) - self.assertEqual(len(first_links), 1) - self.assertIn("startIndex=13", previous_links[0]) - self.assertIn("startIndex=0", first_links[0]) + + if os.getenv("LOCAL_TESTING_TARGET", None) in ["impl"]: + self.assertEqual(len(previous_links), 1) + self.assertEqual(len(next_links), 1) + self.assertEqual(len(first_links), 1) + self.assertIn("startIndex=13", previous_links[0]) + self.assertIn("startIndex=0", first_links[0]) + else: + self.assertEqual(len(previous_links), 1) + self.assertEqual(len(next_links), 0) + self.assertEqual(len(first_links), 1) + self.assertIn("startIndex=13", previous_links[0]) + self.assertIn("startIndex=0", first_links[0]) + self.assertContains(response, "ExplanationOfBenefit") def _test_get_eob_negative(self, version=Versions.NOT_AN_API_VERSION): diff --git a/dev-local/README.md b/dev-local/README.md index dee18d38f..8183b2f57 100644 --- a/dev-local/README.md +++ b/dev-local/README.md @@ -10,6 +10,21 @@ These tools assume you are a developer working on the project, and have access t ## TL;DR +``` +make build-local +``` + +And then + +``` +make run-local bfd=test auth=live daemon=1 +``` + +or maybe + +``` +make run-local bfd=sbx auth=live +``` ## pre-requisites diff --git a/dev-local/run-appropriate-stack.bash b/dev-local/run-appropriate-stack.bash index 294151b33..484345901 100755 --- a/dev-local/run-appropriate-stack.bash +++ b/dev-local/run-appropriate-stack.bash @@ -38,7 +38,7 @@ retrieve_certs set_salt -echo "🚀 Launching the stack for '${ENV}'." 
+echo "🚀 Launching the stack for '${bfd}/${auth}'." if [[ "${bfd}" == "local" ]]; then echo "🥶 FHIR_URLs are not set when running locally." diff --git a/dev-local/utility-functions.bash b/dev-local/utility-functions.bash index bee2a1b14..e216e5e18 100755 --- a/dev-local/utility-functions.bash +++ b/dev-local/utility-functions.bash @@ -162,7 +162,6 @@ set_auth_profile () { export CERT_AND_SALT="YES" retrieve_certs () { - echo "🎁 Retrieving certs for the '${bfd}' environment with suffix '${CERT_SUFFIX}'." unset CERT_SUFFIX @@ -183,7 +182,9 @@ retrieve_certs () { export PROFILE="slsx" fi + if [[ "${CERT_AND_SALT}" == "YES" ]]; then + echo "🎁 Retrieving certs for the '${bfd}' environment with suffix '${CERT_SUFFIX}'." # We will (rudely) create a .bb2 directory in the user's homedir. # Let's call that BB2_CONFIG_DIR export BB2_CONFIG_DIR="${HOME}/.bb2" From bbec178fd4a48d378e0b901b161a8832b5fd1246 Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Fri, 21 Nov 2025 10:58:42 -0500 Subject: [PATCH 13/14] Fixed sandbox FHIRv3 URL. --- dev-local/.env.local.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-local/.env.local.example b/dev-local/.env.local.example index 9a4f82515..bab7f8844 100644 --- a/dev-local/.env.local.example +++ b/dev-local/.env.local.example @@ -40,7 +40,7 @@ DJANGO_SECRET_KEY=replace-me-with-real-secret FHIR_URL_TEST="https://test.fhir.bfd.cmscloud.local" FHIR_URL_V3_TEST="https://test.fhirv3.bfd.cmscloud.local" FHIR_URL_SBX="https://prod-sbx.fhir.bfd.cmscloud.local" -FHIR_URL_V3_SBX="https://prod-sbx.fhirv3.bfd.cmscloud.local" +FHIR_URL_V3_SBX="https://sandbox.fhirv3.bfd.cmscloud.local" ################################################################################ # DATABASE From 592c5e5cbb0fe12601d79c84cfa09f3240a7663a Mon Sep 17 00:00:00 2001 From: Matt Jadud Date: Fri, 21 Nov 2025 13:52:47 -0500 Subject: [PATCH 14/14] Removing comments, unneeded prints. 
--- apps/authorization/models.py | 11 ++++------- apps/authorization/views.py | 1 - .../commands/create_test_feature_switches.py | 3 --- .../fhir/bluebutton/tests/test_wellknown_endpoints.py | 1 - .../commands/create_test_user_and_application.py | 2 -- dev-local/utility-functions.bash | 1 - 6 files changed, 4 insertions(+), 15 deletions(-) diff --git a/apps/authorization/models.py b/apps/authorization/models.py index e37d2adfd..ee6d5142c 100644 --- a/apps/authorization/models.py +++ b/apps/authorization/models.py @@ -93,13 +93,10 @@ def update_grants(*args, **kwargs): tokens = AccessToken.objects.all() for token in tokens: if token.is_valid(): - try: - DataAccessGrant.objects.get_or_create( - beneficiary=token.user, - application=token.application, - ) - except Exception as e: - print(f"update_grants: {e}") + DataAccessGrant.objects.get_or_create( + beneficiary=token.user, + application=token.application, + ) def check_grants(): diff --git a/apps/authorization/views.py b/apps/authorization/views.py index bc5f9740c..3d77d9453 100644 --- a/apps/authorization/views.py +++ b/apps/authorization/views.py @@ -32,7 +32,6 @@ class Meta: fields = ('id', 'name', 'logo_uri', 'tos_uri', 'policy_uri', 'contacts') def get_contacts(self, obj): - # print(obj) application = Application.objects.get(id=obj.id) return application.support_email or "" diff --git a/apps/core/management/commands/create_test_feature_switches.py b/apps/core/management/commands/create_test_feature_switches.py index b748c0233..604c21476 100644 --- a/apps/core/management/commands/create_test_feature_switches.py +++ b/apps/core/management/commands/create_test_feature_switches.py @@ -34,7 +34,6 @@ def handle(self, *args, **options): for switch in WAFFLE_FEATURE_SWITCHES: try: Switch.objects.get(name=switch[0]) - # self._log("Feature switch already exists: %s" % (str(switch))) except Switch.DoesNotExist: Switch.objects.create(name=switch[0], active=switch[1], note=switch[2]) self._log("Feature switch created: %s" 
% (str(switch))) @@ -46,7 +45,6 @@ def handle(self, *args, **options): try: flag_obj = Flag.objects.get(name=flag[0]) - # self._log("Feature flag already exists: %s" % (str(flag_obj))) except Flag.DoesNotExist: flag_obj = Flag.objects.create(name=flag[0]) self._log("Feature flag created: %s" % (str(flag[0]))) @@ -62,7 +60,6 @@ def handle(self, *args, **options): flag_obj.save() self._log("User {} added to feature flag: {}".format(u, flag)) except Exception as e: - # print(e) self._log("Exception when adding user {} to feature flag: {}".format(u, flag)) except User.DoesNotExist: # assuming test users exist before creating flags associated with them diff --git a/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py b/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py index 241d6aebe..719f4ef56 100644 --- a/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py +++ b/apps/fhir/bluebutton/tests/test_wellknown_endpoints.py @@ -150,7 +150,6 @@ def test_smart_configuration_missing_fields_in_v3(self): @override_switch('v3_endpoints', active=True) def test_fhir_metadata_extensions_have_v3(self): the_url = f'{BASEURL}/v3/fhir/metadata' - print(the_url) response = self.client.get(the_url) self.assertEqual(response.status_code, 200) json = response.json() diff --git a/apps/testclient/management/commands/create_test_user_and_application.py b/apps/testclient/management/commands/create_test_user_and_application.py index 9a6e91056..650fdf190 100644 --- a/apps/testclient/management/commands/create_test_user_and_application.py +++ b/apps/testclient/management/commands/create_test_user_and_application.py @@ -28,8 +28,6 @@ def create_group(name="BlueButton"): print("%s group pre-existing. Create skipped." 
% (name)) return g -# usr would be a string if it is anything - def create_user(the_group): username = "rogersf" diff --git a/dev-local/utility-functions.bash b/dev-local/utility-functions.bash index e216e5e18..d0193a324 100755 --- a/dev-local/utility-functions.bash +++ b/dev-local/utility-functions.bash @@ -315,7 +315,6 @@ set_salt () { # function will try and stop ALL docker containers. cleanup_docker_stack () { DOCKER_PS=$(docker ps -q) - echo $DOCKER_PS TAKE_IT_DOWN="NO" for id in $DOCKER_PS; do