Merged

47 commits
e9113ce
Overrides the QAT Spark bucket for EMR testing
collinwr Jun 10, 2025
0bfaa09
Makes orphaned_award_info a unmanaged table
collinwr Jul 15, 2025
bdb8a56
Makes orphaned_transaction_info a unmanaged table
collinwr Jul 15, 2025
8134cdf
Makes orphaned_transaction_info a unmanaged table (fixes f-string usage)
collinwr Jul 15, 2025
fb41314
Adds IF EXISTS for dropping view to avoid errors
collinwr Jul 16, 2025
76e842b
Merge branch 'qat' of ssh://ssh.github.com:443/fedspendingtransparenc…
collinwr Jul 17, 2025
33cd6ab
Change ES delta views to use CAST(... AS STRING) syntax
sethstoudenmier Jul 17, 2025
d414a65
Move away from subquery to support hive syntax
sethstoudenmier Jul 18, 2025
882acac
Update unlinked awards download to temp tables in S3
sethstoudenmier Jul 30, 2025
1de89b1
Update covid download to use S3 location for readme
sethstoudenmier Jul 31, 2025
c072e36
Clearing the faba table if it exists prior to cloning
sethstoudenmier Aug 6, 2025
dce98a2
sbx testing
boozallendanny Aug 11, 2025
68758f9
Merge branch 'tst/emr-pipeline-testing' of https://github.com/fedspen…
boozallendanny Aug 11, 2025
c0b5041
data_broker_url
boozallendanny Aug 28, 2025
a529acd
change data_broker_url to data_Broker_db
boozallendanny Aug 28, 2025
11a2879
testing removing subquery
zachflanders-frb Sep 12, 2025
1bab179
remove subquery in award_search incremental merge
sethstoudenmier Sep 15, 2025
6263cad
replace data_broker_db with broker_db
boozallendanny Sep 17, 2025
bed5252
Merge branch 'tst/emr-pipeline-testing' into dev-13454-transaction-se…
zachflanders-frb Sep 25, 2025
15e6b99
Merge branch 'tst/emr-pipeline-testing-no-subquery' into tst/emr-pipe…
zachflanders-frb Sep 26, 2025
2d92ba9
Update transaction search gold columns
zachflanders-frb Sep 30, 2025
cb067e0
Update transaction search delta columns
zachflanders-frb Sep 30, 2025
313a803
Cleanup on transaction search delta columns dict
zachflanders-frb Sep 30, 2025
6edde0d
[DEV-13367] - Add schema for world cities
zachflanders-frb Oct 3, 2025
fca63da
update env config for staging and prod
boozallendanny Oct 14, 2025
5572b38
Merge branch 'qat' into ftr/dev-13367-world-cities-schema
zachflanders-frb Oct 15, 2025
573605f
Merge branch 'dev-13454-transaction-search-improvements' into tst/emr…
zachflanders-frb Oct 15, 2025
af62b8e
Merge branch 'qat' into ftr/dev-13367-world-cities-schema
zachflanders-frb Oct 15, 2025
fc38f45
Merge pull request #4509 from fedspendingtransparency/ftr/dev-13367-w…
zachflanders-frb Oct 15, 2025
91f80f2
handle merge conflicts
sethstoudenmier Oct 16, 2025
3c4b446
handle merge conflicts
sethstoudenmier Oct 16, 2025
b91912b
Merge pull request #4518 from fedspendingtransparency/tst/emr-pipelin…
sethstoudenmier Oct 16, 2025
cc98bf3
formatting
sethstoudenmier Oct 16, 2025
5e8a994
update path for COVID download README
sethstoudenmier Oct 16, 2025
d2d00dd
[DEV-13689] update filtering when only a filter is provided, show chi…
loreleitrimberger Oct 22, 2025
c5407e0
testing qat config with spark bucket
boozallendanny Oct 22, 2025
968c853
Merge pull request #4522 from fedspendingtransparency/ftr/dev-13689-u…
loreleitrimberger Oct 23, 2025
0be948a
[DEV-13722] Truncate the File C update table instead of recreating
sethstoudenmier Oct 23, 2025
3c0a09f
Merge pull request #4523 from fedspendingtransparency/mod/dev-13722-u…
sethstoudenmier Oct 27, 2025
da3e1e9
Merge branch 'staging' of https://github.com/fedspendingtransparency/…
sethstoudenmier Oct 27, 2025
01c300a
[DEV-13706] Use varchar for support between Postgres and Hive; add ne…
sethstoudenmier Oct 30, 2025
1949891
[DEV-13706] Update references to old Broker DB env
sethstoudenmier Oct 31, 2025
521cf41
[DEV-13706] Add back test data
sethstoudenmier Oct 31, 2025
fe45b02
[DEV-13706] Some cleanup
sethstoudenmier Oct 31, 2025
0aeca6c
Merge pull request #4532 from fedspendingtransparency/tmp/qat-confif
sethstoudenmier Nov 3, 2025
1e13c9a
Revert change to qat s3 bucket name
sethstoudenmier Nov 5, 2025
2cd0544
Merge pull request #4519 from fedspendingtransparency/tst/emr-pipelin…
boozallendanny Nov 5, 2025
4 changes: 2 additions & 2 deletions .env.template
@@ -39,8 +39,8 @@ USASPENDING_DB_PASSWORD=usaspender

# The Broker configuration below supports tests creating a Broker DB on the usaspending-db
# container as part of standing up the test suite.
# All values of BROKER_DB_* must match what is in DATA_BROKER_DATABASE_URL if BOTH are given
DATA_BROKER_DATABASE_URL=postgres://usaspending:usaspender@usaspending-db:5432/data_broker
# All values of BROKER_DB_* must match what is in BROKER_DB if BOTH are given
BROKER_DB=postgres://usaspending:usaspender@usaspending-db:5432/data_broker
# Configuration values for a connection string to a Broker database
# Only necessary for some management commands
BROKER_DB_HOST=usaspending-db
2 changes: 1 addition & 1 deletion .github/actions/init-test-environment/action.yaml
@@ -17,7 +17,7 @@ runs:
- name: Set combined ENV
shell: bash
run: |
echo "DATA_BROKER_DATABASE_URL=postgres://$BROKER_DB_USER:$BROKER_DB_PASSWORD@$BROKER_DB_HOST:$BROKER_DB_PORT/$BROKER_DB_NAME" >> $GITHUB_ENV
echo "BROKER_DB=postgres://$BROKER_DB_USER:$BROKER_DB_PASSWORD@$BROKER_DB_HOST:$BROKER_DB_PORT/$BROKER_DB_NAME" >> $GITHUB_ENV
echo "DATABASE_URL=postgres://$USASPENDING_DB_USER:$USASPENDING_DB_PASSWORD@$USASPENDING_DB_HOST:$USASPENDING_DB_PORT/$USASPENDING_DB_NAME" >> $GITHUB_ENV
echo "DOWNLOAD_DATABASE_URL=postgres://$USASPENDING_DB_USER:$USASPENDING_DB_PASSWORD@$USASPENDING_DB_HOST:$USASPENDING_DB_PORT/$USASPENDING_DB_NAME" >> $GITHUB_ENV
echo "ES_HOSTNAME=$ES_SCHEME://$ES_HOST:$ES_PORT" >> $GITHUB_ENV
12 changes: 6 additions & 6 deletions README.md
@@ -60,7 +60,7 @@ Create a `.envrc` file in the repo root, which will be ignored by git. Change cr
```shell
export DATABASE_URL=postgres://usaspending:usaspender@localhost:5432/data_store_api
export ES_HOSTNAME=http://localhost:9200
export DATA_BROKER_DATABASE_URL=postgres://admin:root@localhost:5435/data_broker
export BROKER_DB=postgres://admin:root@localhost:5435/data_broker
```

If `direnv` does not pick this up after saving the file, type
@@ -220,10 +220,10 @@ Deployed production API endpoints and docs are found by following links here: `h

3. To run all USAspending tests in the docker services run
```shell
docker compose run --rm -e DATA_BROKER_DATABASE_URL='' usaspending-test
docker compose run --rm -e BROKER_DB='' usaspending-test
```

_**NOTE**: If an env var named `DATA_BROKER_DATABASE_URL` is set, Broker Integration tests will attempt to be run as well. If doing so, Broker dependencies must be met (see below) or ALL tests will fail hard. Running the above command with `-e DATA_BROKER_DATABASE_URL=''` is a precaution to keep them excluded, unless you really want them (see below if so)._
_**NOTE**: If an env var named `BROKER_DB` is set, Broker Integration tests will attempt to be run as well. If doing so, Broker dependencies must be met (see below) or ALL tests will fail hard. Running the above command with `-e BROKER_DB=''` is a precaution to keep them excluded, unless you really want them (see below if so)._

To run tests locally and not in the docker services, you need:

@@ -273,7 +273,7 @@ To satisfy these dependencies and include execution of these tests, do the follo
```shell
docker build -t dataact-broker-backend ../data-act-broker-backend
```
1. Ensure you have the `DATA_BROKER_DATABASE_URL` environment variable set, and it points to what will be a live PostgreSQL server (no database required) at the time tests are run.
1. Ensure you have the `BROKER_DB` environment variable set, and it points to what will be a live PostgreSQL server (no database required) at the time tests are run.
1. _WARNING: If this is set at all, then ALL above dependencies must be met or ALL tests will fail (Django will try this connection on ALL tests' run)_
1. This DB could be one you always have running in a local Postgres instance, or one you spin up in a Docker container just before tests are run
1. If invoking `pytest` within a docker container (e.g. using the `usaspending-test` container), you _must_ mount the host's docker socket. This is declared already in the `docker-compose.yml` file services, but would be done manually with: `-v /var/run/docker.sock:/var/run/docker.sock`
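
Reviewer note, not part of the diff: a minimal sketch of the "spin up in a Docker container" option mentioned in the list above. The container name, image tag, and credentials are illustrative and chosen to match the `.envrc` example earlier in this README; the only requirement is that `BROKER_DB` points at a live PostgreSQL server when the tests run.

```shell
# Illustrative only: a throwaway Postgres server for the Broker integration tests.
docker run --rm -d --name broker-test-db \
  -e POSTGRES_USER=admin -e POSTGRES_PASSWORD=root \
  -p 5435:5432 postgres:15
export BROKER_DB=postgres://admin:root@localhost:5435/data_broker
```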
@@ -286,15 +286,15 @@ Re-running the test suite using `pytest -rs` with these dependencies satisfied s

_From within a container_

_**NOTE**: `DATA_BROKER_DATABASE_URL` is set in the `docker-compose.yml` file (and could pick up `.env` values, if set)_
_**NOTE**: `BROKER_DB` is set in the `docker-compose.yml` file (and could pick up `.env` values, if set)_

```shell
docker compose run --rm usaspending-test pytest --capture=no --verbose --tb=auto --no-cov --log-cli-level=INFO -k test_broker_integration
```

_From Developer Desktop_

_**NOTE**: `DATA_BROKER_DATABASE_URL` is set in the `.envrc` file and available in the shell_
_**NOTE**: `BROKER_DB` is set in the `.envrc` file and available in the shell_
```shell
pytest --capture=no --verbose --tb=auto --no-cov --log-cli-level=INFO -k test_broker_integration
```
10 changes: 7 additions & 3 deletions docker-compose.yml
@@ -44,7 +44,7 @@ services:
DJANGO_DEBUG: ${DJANGO_DEBUG}
DATABASE_URL: postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/data_store_api
ES_HOSTNAME: ${ES_HOSTNAME}
DATA_BROKER_DATABASE_URL: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker
BROKER_DB: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker

usaspending-test:
profiles:
@@ -68,7 +68,7 @@ services:
DATABASE_URL: postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/data_store_api
ES_HOST: ${ES_HOST}
ES_HOSTNAME: ${ES_HOSTNAME}
DATA_BROKER_DATABASE_URL: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker
BROKER_DB: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker
MINIO_HOST: ${MINIO_HOST}
DOWNLOAD_DATABASE_URL: postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/data_store_api
# Location in host machine where broker src code root can be found
@@ -107,7 +107,7 @@ services:
environment:
DATABASE_URL: postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/data_store_api
ES_HOSTNAME: ${ES_HOSTNAME}
DATA_BROKER_DATABASE_URL: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker
BROKER_DB: postgresql://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/data_broker
# Location in host machine where broker src code root can be found
DATA_BROKER_SRC_PATH: "${PWD}/../data-act-broker-backend"

@@ -233,7 +233,11 @@ services:
mkdir -p data/dti-da-public-files-nonprod/user_reference_docs
# Create the bucket within MinIO used for endpoints that list generated downloads
mkdir -p data/bulk-download
# Create the bucket for MinIO used for Spark
mkdir -p data/data/files
# Populate initial files in buckets
cp dockermount/usaspending_api/data/Data_Dictionary_Crosswalk.xlsx data/dti-da-public-files-nonprod/user_reference_docs/Data_Dictionary_Crosswalk.xlsx
cp dockermount/usaspending_api/data/COVID-19_download_readme.txt data/data/files/COVID-19_download_readme.txt
minio server --address ":10001" --console-address ":10002" /data
"
healthcheck:
2 changes: 1 addition & 1 deletion loading_data.md
@@ -35,7 +35,7 @@ To load in the reference data, from the same directory as manage.py:

To load certified submission data from the broker, you will need a read-only (or higher) connection string to the broker PostgreSQL database. If not running locally, you will also need to ensure your IP address has been whitelisted in the appropriate AWS Security Groups. Set this environment variable before running the **load_submission** command:

DATA_BROKER_DATABASE_URL=postgres://user:password@url:5432/data_broker
BROKER_DB=postgres://user:password@url:5432/data_broker

To load a submission from data broker database:

11 changes: 11 additions & 0 deletions usaspending_api/awards/delta_models/award_id_lookup.py
@@ -0,0 +1,11 @@
from pyspark.sql.types import LongType, StringType, StructField, StructType, BooleanType


AWARD_ID_LOOKUP_SCHEMA = StructType(
[
StructField("award_id", LongType(), False),
StructField("is_fpds", BooleanType(), False),
StructField("transaction_unique_id", StringType(), False),
StructField("generated_unique_award_id", StringType(), False),
]
)
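
Reviewer note, not part of the diff: a schema like `AWARD_ID_LOOKUP_SCHEMA` is typically handed to `createDataFrame` to get a correctly typed (possibly empty) DataFrame. A minimal sketch, assuming an active SparkSession:

```python
# Illustrative usage of the schema above; the SparkSession setup is assumed, not part of this PR.
from pyspark.sql import SparkSession

from usaspending_api.awards.delta_models.award_id_lookup import AWARD_ID_LOOKUP_SCHEMA

spark = SparkSession.builder.getOrCreate()
empty_lookup = spark.createDataFrame([], schema=AWARD_ID_LOOKUP_SCHEMA)
empty_lookup.printSchema()  # award_id, is_fpds, transaction_unique_id, generated_unique_award_id
```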
@@ -16,6 +16,7 @@
from usaspending_api.awards.management.sql.spark.unlinked_awards_summary_file import summary_file
from usaspending_api.awards.management.sql.spark.unlinked_assistance_file_d2 import file_d2_sql_string
from usaspending_api.awards.management.sql.spark.unlinked_accounts_file_c import file_c_sql_string
from usaspending_api.config import CONFIG
from usaspending_api.download.filestreaming.file_description import build_file_description, save_file_description
from usaspending_api.download.filestreaming.zip_file import append_files_to_zip_file
from usaspending_api.references.models.toptier_agency import ToptierAgency
@@ -108,9 +109,10 @@ def handle(self, *args, **options):
# Save queries as delta tables for efficiency
for delta_table_name, sql_file, final_name in self.download_file_list:
df = self.spark.sql(sql_file)
df.write.format(source="delta").mode(saveMode="overwrite").option("overwriteSchema", "True").saveAsTable(
name=delta_table_name
)
df.write.format(source="delta").mode(saveMode="overwrite").options(
overwriteSchema=True,
path=f"s3a://{CONFIG.SPARK_S3_BUCKET}/{CONFIG.DELTA_LAKE_S3_PATH}/temp/{delta_table_name}",
).saveAsTable(name=f"temp.{delta_table_name}")

for agency in toptier_agencies:
agency_name = agency["name"]
@@ -140,7 +142,7 @@ def process_data_copy_jobs(self, zip_file_path):
self.filepaths_to_delete.append(zip_file_path)

for delta_table_name, sql_file, final_name in self.download_file_list:
df = self.spark.sql(f"select * from {delta_table_name} where toptier_code = '{self._toptier_code}'")
df = self.spark.sql(f"select * from temp.{delta_table_name} where toptier_code = '{self._toptier_code}'")
sql_file = None
final_path = self._create_data_csv_dest_path(final_name)
intermediate_data_file_path = final_path.parent / (final_path.name + "_temp")
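
Reviewer note: the two hunks above write each download query to an external Delta table under the Spark S3 bucket (registered in a `temp` schema) and later read each agency's slice back by table name. A condensed sketch of that pattern, with a placeholder table name and toptier code, assuming a Delta-enabled SparkSession and an existing `temp` schema:

```python
# Illustrative only; not code from this PR.
from pyspark.sql import SparkSession

from usaspending_api.config import CONFIG

spark = SparkSession.builder.getOrCreate()  # assumed to be Delta-enabled
delta_table_name = "example_download_table"  # placeholder name
s3_path = f"s3a://{CONFIG.SPARK_S3_BUCKET}/{CONFIG.DELTA_LAKE_S3_PATH}/temp/{delta_table_name}"

# Stand-in for the command's `spark.sql(sql_file)` result.
df = spark.createDataFrame([("012", 1)], ["toptier_code", "award_count"])

# Write an external Delta table backed by S3 and register it under the temp schema.
df.write.format("delta").mode("overwrite").options(overwriteSchema=True, path=s3_path).saveAsTable(
    f"temp.{delta_table_name}"
)

# Each agency's rows are then pulled back from the named temp table.
per_agency = spark.sql(f"select * from temp.{delta_table_name} where toptier_code = '012'")
```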
@@ -0,0 +1,28 @@
# Generated by Django 4.2.23 on 2025-10-23 17:39

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("awards", "0113_financialaccountsbyawards_ussgl480210_rein_undel_obs_cpe_and_more"),
]

operations = [
# This recreates the table that was originally created as part of Awards migration "0100_ctodlinkageupdates.py".
# A management command was previously recreating this table without any indexes or constraints. The SQL below
# was taken directly from the sqlmigrate for the migration mentioned above. Additionally, the "NOT NULL"
# constraint is carried forward so that Django can manage the table in the following AlterField statement.
migrations.RunSQL(
sql="""
DROP TABLE IF EXISTS c_to_d_linkage_updates;
CREATE TABLE "c_to_d_linkage_updates" ("financial_accounts_by_awards_id" integer NOT NULL PRIMARY KEY, "award_id" integer NOT NULL);
"""
),
migrations.AlterField(
model_name="ctodlinkageupdates",
name="award_id",
field=models.IntegerField(null=True),
),
]
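
Reviewer note tying this migration to commit [DEV-13722] in this PR ("Truncate the File C update table instead of recreating"): once the table is recreated here with its constraints, a loader can empty it with TRUNCATE instead of dropping it. A hypothetical sketch, not the repository's actual command code:

```python
# Hypothetical illustration: clear c_to_d_linkage_updates between loads without
# dropping it, so the primary key and the Django-managed definition stay intact.
from django.db import connection


def reset_c_to_d_linkage_updates():
    with connection.cursor() as cursor:
        cursor.execute('TRUNCATE TABLE "c_to_d_linkage_updates"')
```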
2 changes: 1 addition & 1 deletion usaspending_api/awards/models/c_to_d_linkage_updates.py
@@ -4,7 +4,7 @@
class CToDLinkageUpdates(models.Model):

financial_accounts_by_awards_id = models.IntegerField(primary_key=True)
award_id = models.IntegerField(unique=False)
award_id = models.IntegerField(unique=False, null=True)

class Meta:
managed = True
2 changes: 1 addition & 1 deletion usaspending_api/broker/helpers/delete_fabs_transactions.py
@@ -43,7 +43,7 @@ def get_delete_pks_for_afa_keys(afa_ids_to_delete):
is_active is not true
"""

with connections[settings.DATA_BROKER_DB_ALIAS].cursor() as cursor:
with connections[settings.BROKER_DB_ALIAS].cursor() as cursor:
cursor.execute(sql, [uppercased])
rows = cursor.fetchall()

@@ -16,16 +16,16 @@ class Command(load_base.Command):
"""

help = "Derives all FABS office names from the office codes in the Office table in Data broker. The \
DATA_BROKER_DATABASE_URL environment variable must set so we can pull Office data from their db."
BROKER_DB environment variable must set so we can pull Office data from their db."

def handle(self, *args, **options):
# Grab data broker database connections
if not options["test"]:
try:
db_conn = connections[settings.DATA_BROKER_DB_ALIAS]
db_conn = connections[settings.BROKER_DB_ALIAS]
db_cursor = db_conn.cursor()
except Exception as err:
logger.critical("Could not connect to database. Is DATA_BROKER_DATABASE_URL set?")
logger.critical("Could not connect to database. Is BROKER_DB set?")
logger.critical(print(err))
raise
else:
@@ -63,7 +63,7 @@ def handle(self, *args, **options):
f'Copying "{broker_schema_name}"."{broker_table_name}" from Broker to '
f'"{usas_schema_name}"."{usas_table_name}" in USAspending.'
)
broker_conn = connections[settings.DATA_BROKER_DB_ALIAS]
broker_conn = connections[settings.BROKER_DB_ALIAS]
usas_conn = connections[settings.DEFAULT_DB_ALIAS]
table_exists_query = f"""
SELECT EXISTS (
@@ -95,7 +95,7 @@ def get_broker_data(table_type, fiscal_year, fy_start, fy_end, year_range=None,
fy=fiscal_year,
broker_where=broker_where,
usaspending_where=usaspending_where,
broker_server=settings.DATA_BROKER_DBLINK_NAME,
broker_server=settings.BROKER_DBLINK_NAME,
)
return sql_statement

2 changes: 1 addition & 1 deletion usaspending_api/broker/management/commands/update_duns.py
@@ -82,7 +82,7 @@ def handle(self, *args, **options):
total_start = datetime.now()
new_update_date = total_start.strftime("%Y-%m-%d")

db_cursor = connections[settings.DATA_BROKER_DB_ALIAS].cursor()
db_cursor = connections[settings.BROKER_DB_ALIAS].cursor()

update_date_query = DUNS.objects.all().aggregate(Max("update_date"))
update_date = update_date_query["update_date__max"]
@@ -5,7 +5,7 @@
from django.db import connections

from usaspending_api.common.helpers.timing_helpers import Timer
from usaspending_api.settings import DATA_BROKER_DB_ALIAS, DEFAULT_DB_ALIAS
from usaspending_api.settings import BROKER_DB_ALIAS, DEFAULT_DB_ALIAS

logger = logging.getLogger(__name__)

@@ -85,7 +85,7 @@ def id_ranges(self, min_id: int, max_id: int) -> Iterator[tuple[int, int]]:
yield n, min(n + self.CHUNK_SIZE, max_id)

def get_match_field_range(self) -> tuple[int, int]:
with connections[DATA_BROKER_DB_ALIAS].cursor() as cursor:
with connections[BROKER_DB_ALIAS].cursor() as cursor:
cursor.execute(
f"""
SELECT min({self.broker_match_field}), max({self.broker_match_field})
@@ -379,7 +379,7 @@ def add_arguments(self, parser):
def handle(self, *args, **options):
logger.info("Starting historical data load...")

db_cursor = connections[settings.DATA_BROKER_DB_ALIAS].cursor()
db_cursor = connections[settings.BROKER_DB_ALIAS].cursor()
fiscal_year = options.get("fiscal_year")
page = options.get("page")
limit = options.get("limit")
@@ -6,7 +6,7 @@


class BrokerIntegrationTestCase(TestCase):
databases = {settings.DEFAULT_DB_ALIAS, settings.DATA_BROKER_DB_ALIAS}
databases = {settings.DEFAULT_DB_ALIAS, settings.BROKER_DB_ALIAS}
dummy_table_name = "dummy_broker_table_to_be_rolled_back"

@classmethod
@@ -16,7 +16,7 @@ def setUpClass(cls):
@classmethod
def tearDownClass(cls):
# Follow-up of test_broker_transactional_test
with connections[settings.DATA_BROKER_DB_ALIAS].cursor() as cursor:
with connections[settings.BROKER_DB_ALIAS].cursor() as cursor:
cursor.execute("select * from pg_tables where tablename = '{}'".format(cls.dummy_table_name))
results = cursor.fetchall()
assert results is not None
@pytest.mark.usefixtures("broker_db_setup")
def test_can_connect_to_broker(self):
"""Simple 'integration test' that checks a Broker DB exists to integrate with"""
connection = connections[settings.DATA_BROKER_DB_ALIAS]
connection = connections[settings.BROKER_DB_ALIAS]
with connection.cursor() as cursor:
cursor.execute("SELECT now()")
results = cursor.fetchall()
dummy_contents = "dummy_text"

# Make sure the table and the data get in there
connection = connections[settings.DATA_BROKER_DB_ALIAS]
connection = connections[settings.BROKER_DB_ALIAS]
with connection.cursor() as cursor:
cursor.execute("create table {} (contents text)".format(self.dummy_table_name))
cursor.execute("insert into {} values ('{}')".format(self.dummy_table_name, dummy_contents))
@pytest.mark.usefixtures("broker_db_setup")
def test_broker_db_fully_setup(self):
"""Simple 'integration test' that checks a Broker DB had its schema setup"""
connection = connections[settings.DATA_BROKER_DB_ALIAS]
connection = connections[settings.BROKER_DB_ALIAS]
with connection.cursor() as cursor:
cursor.execute("select * from pg_tables where tablename = 'alembic_version'")
results = cursor.fetchall()
"""
connection = connections[DEFAULT_DB_ALIAS]
with connection.cursor() as cursor:
cursor.execute(f"select srvname from pg_foreign_server where srvname = '{settings.DATA_BROKER_DBLINK_NAME}'")
cursor.execute(f"select srvname from pg_foreign_server where srvname = '{settings.BROKER_DBLINK_NAME}'")
results = cursor.fetchall()
if not results or not results[0][0] == settings.DATA_BROKER_DBLINK_NAME:
if not results or not results[0][0] == settings.BROKER_DBLINK_NAME:
pytest.skip(
f"No foreign server named '{settings.DATA_BROKER_DBLINK_NAME}' has been setup on this "
f"No foreign server named '{settings.BROKER_DBLINK_NAME}' has been setup on this "
"USAspending database. Skipping the test of integration with that server via dblink"
)
cursor.execute(
f"SELECT * FROM dblink('{settings.DATA_BROKER_DBLINK_NAME}','SELECT now()') "
"AS broker_time(the_now timestamp)"
f"SELECT * FROM dblink('{settings.BROKER_DBLINK_NAME}','SELECT now()') " "AS broker_time(the_now timestamp)"
)
results = cursor.fetchall()
assert results is not None
@@ -13,7 +13,7 @@ class TestThingWithMultipleDatabases(TestCase):

@classmethod
def setUpTestData(cls):
connection = connections[settings.DATA_BROKER_DB_ALIAS]
connection = connections[settings.BROKER_DB_ALIAS]
with connection.cursor() as cursor:

cursor.execute("select count(*) from published_fabs")