Skip to content

Commit 3d3b72d

Browse files
SIMSBIOHUB-850: Implement Crunchy DB (#338)
1 parent bf057b4 commit 3d3b72d

File tree

23 files changed

+823
-302
lines changed

23 files changed

+823
-302
lines changed

.github/workflows/deployStatic.yml

Lines changed: 1 addition & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -120,62 +120,6 @@ jobs:
120120
docker build -t ${{ vars.OPENSHIFT_REGISTRY }}/${{ vars.OPENSHIFT_LICENSE_PLATE }}-tools/$APP_NAME:${{ env.IMAGE_TAG }} .
121121
docker push ${{ vars.OPENSHIFT_REGISTRY }}/${{ vars.OPENSHIFT_LICENSE_PLATE }}-tools/$APP_NAME:${{ env.IMAGE_TAG }}
122122
123-
# Build the Database image
124-
buildAndPushDatabase:
125-
name: Build Database Image
126-
runs-on: ubuntu-latest
127-
timeout-minutes: 20
128-
if: ${{ github.event.pull_request.merged == true || github.event_name == 'workflow_dispatch' }}
129-
env:
130-
IMAGE_TAG: build-1.0.0-${{ needs.checkoutRepo.outputs.timestamp }}-${{ github.base_ref || github.ref_name }}
131-
BRANCH: ${{ github.base_ref || github.ref_name }}
132-
APP_NAME: "biohub-platform-db"
133-
needs:
134-
- checkoutRepo
135-
steps:
136-
# Load repo from cache
137-
- name: Cache repo
138-
uses: actions/cache@v4
139-
id: cache-repo
140-
env:
141-
cache-name: cache-repo
142-
with:
143-
path: ${{ github.workspace }}/*
144-
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.event.pull_request.head.sha || github.sha }}
145-
146-
# Checkout the branch if not restored via cache
147-
- name: Checkout Target Branch
148-
if: steps.cache-repo.outputs.cache-hit != 'true'
149-
uses: actions/checkout@v6
150-
151-
# Install oc, which was removed from the ubuntu-latest image in v24.04
152-
- name: Install OpenShift CLI tools
153-
uses: redhat-actions/openshift-tools-installer@v1
154-
with:
155-
oc: "4.16"
156-
157-
# Log in to OpenShift
158-
# Note: The secrets needed to log in are NOT available if the PR comes from a FORK.
159-
# PRs must originate from a branch off the original repo or else all openshift `oc` commands will fail.
160-
- name: Log in to OpenShift
161-
uses: redhat-actions/oc-login@v1
162-
with:
163-
openshift_server_url: https://api.silver.devops.gov.bc.ca:6443
164-
openshift_token: ${{ secrets.TOOLS_SA_TOKEN }}
165-
namespace: ${{ vars.OPENSHIFT_LICENSE_PLATE }}-${{ env.BRANCH }}
166-
167-
# Authenticate Docker with OpenShift registry
168-
- name: Authenticate Docker with OpenShift registry
169-
run: |
170-
echo ${{ secrets.TOOLS_SA_TOKEN }} | docker login -u unused --password-stdin ${{ vars.OPENSHIFT_REGISTRY }}
171-
172-
# Build and push the database image using Docker
173-
- name: Build and Push Database Image
174-
working-directory: database
175-
run: |
176-
docker build -t ${{ vars.OPENSHIFT_REGISTRY }}/${{ vars.OPENSHIFT_LICENSE_PLATE }}-tools/$APP_NAME:${{ env.IMAGE_TAG }} .
177-
docker push ${{ vars.OPENSHIFT_REGISTRY }}/${{ vars.OPENSHIFT_LICENSE_PLATE }}-tools/$APP_NAME:${{ env.IMAGE_TAG }}
178-
179123
# Build the Database Setup image
180124
buildAndPushDatabaseSetup:
181125
name: Build Database Setup Image
@@ -300,7 +244,6 @@ jobs:
300244
needs:
301245
- checkoutRepo
302246
- buildAndPushApp
303-
- buildAndPushDatabase
304247
- buildAndPushDatabaseSetup
305248
- buildAndPushAPI
306249
steps:
@@ -358,15 +301,6 @@ jobs:
358301
-f ./infrastructure/biohub-platform/values-$BRANCH.yaml \
359302
--set-string biohub-platform-app.environment.ts=$TS \
360303
--set-string biohub-platform-app.environment.changeId=$BUILD_TAG \
361-
--set-string biohub-platform-db.environment.ts=$TS \
362-
--set-string biohub-platform-db.environment.changeId=$BUILD_TAG \
363-
--set-string biohub-platform-db.app.postgresAdmin=${{ secrets.DATABASE_ADMIN }} \
364-
--set-string biohub-platform-db.app.postgresAdminPassword=${{ secrets.DATABASE_ADMIN_PASSWORD }} \
365-
--set-string biohub-platform-db.app.postgresUser=${{ secrets.DATABASE_USER }} \
366-
--set-string biohub-platform-db.app.postgresUserPassword=${{ secrets.DATABASE_USER_PASSWORD }} \
367-
--set-string biohub-platform-db.app.postgresUserApi=${{ secrets.DATABASE_USER_API }} \
368-
--set-string biohub-platform-db.app.postgresUserApiPassword=${{ secrets.DATABASE_USER_API_PASSWORD }} \
369-
--set-string biohub-platform-db.app.postgresDb=${{ secrets.DATABASE_NAME }} \
370304
--set-string biohub-platform-db-setup.environment.ts=$TS \
371305
--set-string biohub-platform-db-setup.environment.changeId=$BUILD_TAG \
372306
--set-string biohub-platform-api.environment.ts=$TS \
@@ -405,7 +339,7 @@ jobs:
405339
set -e
406340
KEEP=3
407341
SUFFIX="-$BRANCH"
408-
IMAGES="biohub-platform-app biohub-platform-api biohub-platform-db biohub-platform-db-setup"
342+
IMAGES="biohub-platform-app biohub-platform-api biohub-platform-db-setup"
409343
echo "Pruning image tags for environment $BRANCH in $TOOLS_NAMESPACE (keeping $KEEP most recent by date)"
410344
for IMAGE in $IMAGES; do
411345
echo "Checking image stream $IMAGE..."

api/src/app.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ import { initialize } from 'express-openapi';
33
import multer from 'multer';
44
import { OpenAPIV3 } from 'openapi-types';
55
import swaggerUIExperss from 'swagger-ui-express';
6-
import { defaultPoolConfig, initDBPool } from './database/db';
6+
import { getDefaultPoolConfig, initDBPool } from './database/db';
77
import { initDBConstants } from './database/db-constants';
88
import { ensureHTTPError, HTTP400, HTTP500 } from './errors/http-error';
99
import { rootAPIDoc } from './openapi/root-api-doc';
@@ -154,7 +154,7 @@ app.use('/api-docs', swaggerUIExperss.serve, swaggerUIExperss.setup(openAPIFrame
154154

155155
// Start api
156156
async function main() {
157-
initDBPool(defaultPoolConfig);
157+
initDBPool(getDefaultPoolConfig());
158158
await initDBConstants();
159159
await initPgBoss();
160160

api/src/database/db.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import knex, { Knex } from 'knex';
2+
import * as fs from 'node:fs';
23
import * as pg from 'pg';
34
import { SQLStatement } from 'sql-template-strings';
45
import { z } from 'zod';
@@ -35,6 +36,12 @@ export const defaultPoolConfig: pg.PoolConfig = {
3536
idleTimeoutMillis: DB_IDLE_TIMEOUT
3637
};
3738

39+
/** Default pool config with SSL (CA verification) when PG_SSL_CA_PATH is set. */
40+
export const getDefaultPoolConfig = (): pg.PoolConfig => ({
41+
...defaultPoolConfig,
42+
...(process.env.PG_SSL_CA_PATH && { ssl: { ca: fs.readFileSync(process.env.PG_SSL_CA_PATH) } })
43+
});
44+
3845
// Custom type handler for psql `DATE` type to prevent local time/zone information from being added.
3946
// Why? By default, node-postgres assumes local time/zone for any psql `DATE` or `TIME` types that don't have timezone information.
4047
// This Can lead to unexpected behavior when the original psql `DATE` value was intentionally omitting time/zone information.

database/src/knexfile.ts

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,8 @@
1+
import * as fs from 'node:fs';
2+
3+
// Use SSL with CA verification when PG_SSL_CA_PATH is set (Crunchy); otherwise no SSL (legacy/PR).
4+
const sslConfig = process.env.PG_SSL_CA_PATH ? { ca: fs.readFileSync(process.env.PG_SSL_CA_PATH) } : false;
5+
16
export default {
27
development: {
38
client: 'pg',
@@ -6,7 +11,8 @@ export default {
611
port: process.env.DB_PORT,
712
database: process.env.DB_DATABASE,
813
user: process.env.DB_ADMIN,
9-
password: process.env.DB_ADMIN_PASS
14+
password: process.env.DB_ADMIN_PASS,
15+
...(sslConfig && { ssl: sslConfig })
1016
},
1117
pool: {
1218
min: 2,
@@ -28,7 +34,8 @@ export default {
2834
port: process.env.DB_PORT,
2935
database: process.env.DB_DATABASE,
3036
user: process.env.DB_ADMIN,
31-
password: process.env.DB_ADMIN_PASS
37+
password: process.env.DB_ADMIN_PASS,
38+
...(sslConfig && { ssl: sslConfig })
3239
},
3340
pool: {
3441
min: 2,

database/src/migrations/20260113000000_add_codeset_feature_type.ts

Lines changed: 76 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,9 @@ import { Knex } from 'knex';
1010
* Note: Property names remain unchanged - existing properties will now receive values
1111
* in the format 'code::{table_name}::{id}' instead of plain string values.
1212
*
13+
* Inserts are idempotent (WHERE NOT EXISTS) so the migration can be re-run safely
14+
* after a pg_restore without duplicate key errors.
15+
*
1316
* @export
1417
* @param {Knex} knex
1518
* @return {*} {Promise<void>}
@@ -20,76 +23,102 @@ export async function up(knex: Knex): Promise<void> {
2023
set search_path=biohub,public;
2124
2225
----------------------------------------------------------------------------------------
23-
-- Insert codeset feature type
26+
-- Insert codeset feature type (idempotent)
2427
----------------------------------------------------------------------------------------
2528
INSERT INTO feature_type (name, display_name, description)
26-
VALUES (
27-
'codeset',
28-
'Codeset',
29-
'A code table containing standardized categories and code values.'
29+
SELECT 'codeset', 'Codeset', 'A code table containing standardized categories and code values.'
30+
WHERE NOT EXISTS (
31+
SELECT 1 FROM feature_type
32+
WHERE name = 'codeset' AND record_end_date IS NULL
3033
);
3134
3235
----------------------------------------------------------------------------------------
33-
-- Insert categories feature property
36+
-- Insert categories feature property (idempotent)
3437
----------------------------------------------------------------------------------------
35-
INSERT INTO
38+
INSERT INTO
3639
feature_property (feature_property_type_id, name, display_name, description, calculated_value)
37-
VALUES (
38-
(SELECT feature_property_type_id FROM feature_property_type WHERE name = 'object'),
40+
SELECT
41+
(SELECT feature_property_type_id FROM feature_property_type WHERE name = 'object' AND record_end_date IS NULL),
3942
'categories',
4043
'Categories',
4144
'An object of categories, each containing standardized code values.',
4245
false
46+
WHERE NOT EXISTS (
47+
SELECT 1 FROM feature_property
48+
WHERE name = 'categories' AND record_end_date IS NULL
4349
);
4450
4551
----------------------------------------------------------------------------------------
46-
-- Link categories property to codeset feature type
52+
-- Link categories property to codeset feature type (idempotent)
4753
----------------------------------------------------------------------------------------
48-
INSERT INTO
54+
INSERT INTO
4955
feature_type_property (feature_type_id, feature_property_id, required_value)
50-
VALUES (
51-
(SELECT feature_type_id FROM feature_type WHERE name = 'codeset'),
52-
(SELECT feature_property_id FROM feature_property WHERE name = 'categories'),
56+
SELECT
57+
(SELECT feature_type_id FROM feature_type WHERE name = 'codeset' AND record_end_date IS NULL),
58+
(SELECT feature_property_id FROM feature_property WHERE name = 'categories' AND record_end_date IS NULL),
5359
true
60+
WHERE NOT EXISTS (
61+
SELECT 1 FROM feature_type_property ftp
62+
WHERE ftp.feature_type_id = (SELECT feature_type_id FROM feature_type WHERE name = 'codeset' AND record_end_date IS NULL)
63+
AND ftp.feature_property_id = (SELECT feature_property_id FROM feature_property WHERE name = 'categories' AND record_end_date IS NULL)
64+
AND ftp.record_end_date IS NULL
5465
);
5566
5667
----------------------------------------------------------------------------------------
57-
-- Insert partnership properties for dataset feature type
68+
-- Insert partnership properties for dataset feature type (idempotent)
5869
----------------------------------------------------------------------------------------
59-
INSERT INTO
70+
INSERT INTO
71+
feature_property (feature_property_type_id, name, display_name, description, calculated_value)
72+
SELECT (SELECT feature_property_type_id FROM feature_property_type WHERE name = 'array' AND record_end_date IS NULL),
73+
'indigenous_partnerships',
74+
'Indigenous Partnerships',
75+
'An array of indigenous partnerships',
76+
false
77+
WHERE NOT EXISTS (
78+
SELECT 1 FROM feature_property
79+
WHERE name = 'indigenous_partnerships' AND record_end_date IS NULL
80+
);
81+
82+
INSERT INTO
6083
feature_property (feature_property_type_id, name, display_name, description, calculated_value)
61-
VALUES
62-
(
63-
(SELECT feature_property_type_id FROM feature_property_type WHERE name = 'array'),
64-
'indigenous_partnerships',
65-
'Indigenous Partnerships',
66-
'An array of indigenous partnerships',
67-
false
68-
),
69-
(
70-
(SELECT feature_property_type_id FROM feature_property_type WHERE name = 'array'),
71-
'stakeholder_partnerships',
72-
'Stakeholder Partnerships',
73-
'An array of stakeholder partnerships',
74-
false
75-
);
76-
77-
----------------------------------------------------------------------------------------
78-
-- Link partnership properties to dataset feature type
79-
----------------------------------------------------------------------------------------
80-
INSERT INTO
84+
SELECT (SELECT feature_property_type_id FROM feature_property_type WHERE name = 'array' AND record_end_date IS NULL),
85+
'stakeholder_partnerships',
86+
'Stakeholder Partnerships',
87+
'An array of stakeholder partnerships',
88+
false
89+
WHERE NOT EXISTS (
90+
SELECT 1 FROM feature_property
91+
WHERE name = 'stakeholder_partnerships' AND record_end_date IS NULL
92+
);
93+
94+
----------------------------------------------------------------------------------------
95+
-- Link partnership properties to dataset feature type (idempotent)
96+
----------------------------------------------------------------------------------------
97+
INSERT INTO
8198
feature_type_property (feature_type_id, feature_property_id, required_value)
82-
VALUES
83-
(
84-
(SELECT feature_type_id FROM feature_type WHERE name = 'dataset'),
85-
(SELECT feature_property_id FROM feature_property WHERE name = 'indigenous_partnerships'),
86-
false
87-
),
88-
(
89-
(SELECT feature_type_id FROM feature_type WHERE name = 'dataset'),
90-
(SELECT feature_property_id FROM feature_property WHERE name = 'stakeholder_partnerships'),
91-
false
92-
);
99+
SELECT
100+
(SELECT feature_type_id FROM feature_type WHERE name = 'dataset' AND record_end_date IS NULL),
101+
(SELECT feature_property_id FROM feature_property WHERE name = 'indigenous_partnerships' AND record_end_date IS NULL),
102+
false
103+
WHERE NOT EXISTS (
104+
SELECT 1 FROM feature_type_property ftp
105+
WHERE ftp.feature_type_id = (SELECT feature_type_id FROM feature_type WHERE name = 'dataset' AND record_end_date IS NULL)
106+
AND ftp.feature_property_id = (SELECT feature_property_id FROM feature_property WHERE name = 'indigenous_partnerships' AND record_end_date IS NULL)
107+
AND ftp.record_end_date IS NULL
108+
);
109+
110+
INSERT INTO
111+
feature_type_property (feature_type_id, feature_property_id, required_value)
112+
SELECT
113+
(SELECT feature_type_id FROM feature_type WHERE name = 'dataset' AND record_end_date IS NULL),
114+
(SELECT feature_property_id FROM feature_property WHERE name = 'stakeholder_partnerships' AND record_end_date IS NULL),
115+
false
116+
WHERE NOT EXISTS (
117+
SELECT 1 FROM feature_type_property ftp
118+
WHERE ftp.feature_type_id = (SELECT feature_type_id FROM feature_type WHERE name = 'dataset' AND record_end_date IS NULL)
119+
AND ftp.feature_property_id = (SELECT feature_property_id FROM feature_property WHERE name = 'stakeholder_partnerships' AND record_end_date IS NULL)
120+
AND ftp.record_end_date IS NULL
121+
);
93122
`);
94123
}
95124

0 commit comments

Comments
 (0)