24 changes: 24 additions & 0 deletions .circleci/config.yml
@@ -457,6 +457,29 @@ jobs:
            DREVOPS_DEPLOY_PR_HEAD=$CIRCLE_SHA1 \
            ./scripts/drevops/deploy.sh
          no_output_timeout: 30m
      - run:
          name: Trigger GitHub workflow for Quant Cloud deployment
          command: |
            if [ -n "${GITHUB_TOKEN}" ]; then
              payload=$(jq -nc --arg branch "${CIRCLE_BRANCH}" --arg sha "${CIRCLE_SHA1}" --arg url "${CIRCLE_BUILD_URL}" \
                '{event_type:"circleci_success", client_payload:{branch:$branch, sha:$sha, build_url:$url}}')
              resp=$(mktemp)
              code=$(curl -sS -f -o "$resp" -w "%{http_code}" \
                -X POST \
                -H "Accept: application/vnd.github+json" \
                -H "Authorization: token ${GITHUB_TOKEN}" \
                "https://api.github.com/repos/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/dispatches" \
                -d "$payload" || true)
              if [ "${code:-0}" -ge 200 ] && [ "${code:-0}" -lt 300 ]; then
                echo "Triggered Quant GitHub deploy workflow for branch: ${CIRCLE_BRANCH}"
              else
                echo "Failed to trigger GitHub workflow (HTTP ${code:-0}). Response:"
                cat "$resp"
                exit 1
              fi
            else
              echo "GITHUB_TOKEN not set - skipping GitHub workflow trigger"
            fi
      - store_artifacts:
          path: *artifacts
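For reference, the dispatch call can be dry-run outside CircleCI before the job lands. A minimal sketch, assuming a personal access token with repo scope and placeholder OWNER/REPO values (none of these names come from this change); note that repository_dispatch only triggers workflows defined on the repository's default branch, and the receiving workflow must list the circleci_success event type.

# Confirm the token can see the repository at all.
OWNER="your-org" REPO="your-repo" GITHUB_TOKEN="ghp_placeholder"
curl -sS -f -H "Authorization: token ${GITHUB_TOKEN}" \
  "https://api.github.com/repos/${OWNER}/${REPO}" >/dev/null && echo "token ok"

# Fire a test repository_dispatch event; the API returns HTTP 204 on success.
curl -sS -o /dev/null -w "%{http_code}\n" -X POST \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: token ${GITHUB_TOKEN}" \
  "https://api.github.com/repos/${OWNER}/${REPO}/dispatches" \
  -d '{"event_type":"circleci_success","client_payload":{"branch":"develop","sha":"abc123","build_url":"http://example.test"}}'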

@@ -540,6 +563,7 @@ jobs:
done
fi


################################################################################
# WORKFLOWS
################################################################################
23 changes: 23 additions & 0 deletions .docker/config/nginx/location_drupal_prepend_host.conf
@@ -0,0 +1,23 @@
# Set HTTP_HOST for PHP using canonical host from CDN headers
# Priority: HTTP_QUANT_ORIG_HOST > HTTP_X_FORWARDED_HOST > original Host header (preserving port)

# Default to the incoming Host header so non-standard ports stay intact (e.g. :8080)
set $final_host $http_host;

# Use X-Forwarded-Host when provided (reverse proxies)
if ($http_x_forwarded_host != "") {
  set $final_host $http_x_forwarded_host;
}

# Override with Quant-Orig-Host when available
if ($http_quant_orig_host != "") {
  set $final_host $http_quant_orig_host;
}

# Extract first host from comma-separated X-Forwarded-Host values
if ($final_host ~ "^([^,\s]+)") {
  set $final_host $1;
}

# Always pass the calculated host; without override it matches the original Host header
fastcgi_param HTTP_HOST $final_host;
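A quick way to sanity-check this precedence is to hit a local environment with the relevant headers and watch which host Drupal uses for absolute URLs. A rough sketch only; the port, hostnames, and the assumption that Drupal trusts these hosts (trusted_host_patterns, reverse proxy settings) are illustrative, not part of this change.

# No override headers: URLs should be built against the original Host, port intact.
curl -s http://localhost:8080/ | grep -Eo 'https?://[^"]*' | head -n 3

# Reverse-proxy style: X-Forwarded-Host replaces the original Host.
curl -s -H "X-Forwarded-Host: staging.example.com" http://localhost:8080/ \
  | grep -Eo 'https?://[^"]*' | head -n 3

# CDN header wins over both: URLs should now use www.example.com.
curl -s -H "X-Forwarded-Host: staging.example.com" \
  -H "Quant-Orig-Host: www.example.com" http://localhost:8080/ \
  | grep -Eo 'https?://[^"]*' | head -n 3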
10 changes: 10 additions & 0 deletions .docker/entrypoints/cli/03-provision-site.sh
@@ -28,3 +28,13 @@ set -e
# NOTE: This provision script has been disabled because:
# - rsync commands try to connect to Lagoon SSH which is not available in Quant Cloud
# - Use DREVOPS_PROVISION_SKIP=1 environment variable to skip provision steps

# Delegate Drupal provisioning to the Quant-aware script. The standard
# DrevOps provision script is not compatible with Quant Cloud because it relies
# on Lagoon-specific tooling (e.g., rsync to Lagoon SSH).
if [ -x "./scripts/quant/provision-quant.sh" ]; then
  ./scripts/quant/provision-quant.sh
else
  echo "Quant provisioning script missing or not executable." >&2
  exit 1
fi
Comment on lines +32 to +40
⚠️ Potential issue

Do not fail when the Quant script is absent; degrade gracefully to avoid breaking non‑Quant environments.

Current behavior exits 1, which can regress existing Lagoon/non‑Quant flows.

Apply this diff:

-if [ -x "./scripts/quant/provision-quant.sh" ]; then
-  ./scripts/quant/provision-quant.sh
-else
-  echo "Quant provisioning script missing or not executable." >&2
-  exit 1
-fi
+if [ -x "./scripts/quant/provision-quant.sh" ]; then
+  ./scripts/quant/provision-quant.sh
+else
+  echo "Quant provisioning script missing or not executable; skipping." >&2
+fi

Alternatively, gate on a Quant env var (e.g., QUANT_ENV_TYPE) before delegating.

Suggested change
# Delegate Drupal provisioning to the Quant-aware script. The standard
# DrevOps provision script is not compatible with Quant Cloud because it relies
# on Lagoon-specific tooling (e.g., rsync to Lagoon SSH).
if [ -x "./scripts/quant/provision-quant.sh" ]; then
./scripts/quant/provision-quant.sh
else
echo "Quant provisioning script missing or not executable." >&2
exit 1
fi
# Delegate Drupal provisioning to the Quant-aware script. The standard
# DrevOps provision script is not compatible with Quant Cloud because it relies
# on Lagoon-specific tooling (e.g., rsync to Lagoon SSH).
if [ -x "./scripts/quant/provision-quant.sh" ]; then
./scripts/quant/provision-quant.sh
else
echo "Quant provisioning script missing or not executable; skipping." >&2
fi
🤖 Prompt for AI Agents
In .docker/entrypoints/cli/03-provision-site.sh around lines 32–40, the script
currently exits with status 1 if the Quant provisioning script is missing, which
breaks non‑Quant flows; change the logic to degrade gracefully by either (A)
gating execution on a Quant-specific env var (e.g., only attempt to run
./scripts/quant/provision-quant.sh when QUANT_ENV_TYPE or a similar flag
indicates a Quant environment), or (B) if you prefer automatic detection, keep
the executable check but remove the exit: log a warning that the Quant script is
missing/not executable and continue without failing so existing Lagoon/non‑Quant
provisioning proceeds; ensure any downstream provisioning still runs when
skipping the Quant script.
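If the env-var gating route from the comment above is preferred, a minimal sketch follows. QUANT_ENV_TYPE is only the illustrative variable name used in the comment, not something this PR defines, so check what the Quant Cloud runtime actually exposes before relying on it.

# Only delegate when the environment identifies itself as Quant Cloud;
# otherwise fall through so existing Lagoon/local provisioning still runs.
if [ -n "${QUANT_ENV_TYPE:-}" ] && [ -x "./scripts/quant/provision-quant.sh" ]; then
  ./scripts/quant/provision-quant.sh
elif [ -n "${QUANT_ENV_TYPE:-}" ]; then
  echo "QUANT_ENV_TYPE is set but ./scripts/quant/provision-quant.sh is missing or not executable; skipping." >&2
fi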

5 changes: 4 additions & 1 deletion .docker/nginx-drupal.dockerfile
@@ -17,4 +17,7 @@ ENV WEBROOT=${WEBROOT}

RUN apk add --no-cache tzdata

COPY --from=cli /app /app
# Copy custom nginx configuration for CDN header handling
COPY .docker/config/nginx/location_drupal_prepend_host.conf /etc/nginx/conf.d/drupal/
RUN chmod 0644 /etc/nginx/conf.d/drupal/location_drupal_prepend_host.conf
COPY --from=cli /app /app
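To confirm the file actually lands in the built image, something like the following works; the compose service name "nginx" and the use of docker compose are assumptions about the project's local stack, not verified here.

# Build the nginx image through compose (so the cli stage is available) and
# check that the config was copied with the expected 0644 permissions.
docker compose build nginx
docker compose run --rm --no-deps --entrypoint sh nginx \
  -c 'ls -l /etc/nginx/conf.d/drupal/location_drupal_prepend_host.conf'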
2 changes: 1 addition & 1 deletion .github/workflows/build-deploy.yml
@@ -7,8 +7,8 @@ name: Build and Push civictheme-monorepo-drupal to Quant Cloud
- develop
- quant-cloud-migration
- feature/*
- pr-*
- content/*
- pr-*
tags:
- '*'
pull_request:
227 changes: 227 additions & 0 deletions scripts/quant/download-db.sh
@@ -0,0 +1,227 @@
#!/usr/bin/env bash
# Download the latest Quant Cloud database backup, decompress it, and report paths.
set -euo pipefail

if ! command -v qc >/dev/null 2>&1; then
  echo "Quant CLI (qc) is required but was not found. Install it first." >&2
  exit 1
fi

QUANT_ORG_NAME="${QUANT_ORG_NAME:-salsa-digital}"
Comment on lines +5 to +10
⚠️ Potential issue

Guard for Node.js >= 18 (ESM + global fetch) to avoid runtime failure.

The Node block relies on ESM top‑level await and global fetch. Add a preflight check.

Apply this diff:

 if ! command -v qc >/dev/null 2>&1; then
   echo "Quant CLI (qc) is required but was not found. Install it first." >&2
   exit 1
 fi

+if ! command -v node >/dev/null 2>&1; then
+  echo "Node.js >= 18 is required but was not found. Please install it." >&2
+  exit 1
+fi
+NODE_MAJOR="$(node -e "console.log(parseInt(process.versions.node.split('.')[0], 10))")"
+if [ "${NODE_MAJOR}" -lt 18 ]; then
+  echo "Node.js >= 18 is required (for ESM and global fetch). Current: $(node -v)" >&2
+  exit 1
+fi
+
 QUANT_ORG_NAME="${QUANT_ORG_NAME:-salsa-digital}"
Suggested change
if ! command -v qc >/dev/null 2>&1; then
echo "Quant CLI (qc) is required but was not found. Install it first." >&2
exit 1
fi
QUANT_ORG_NAME="${QUANT_ORG_NAME:-salsa-digital}"
if ! command -v qc >/dev/null 2>&1; then
echo "Quant CLI (qc) is required but was not found. Install it first." >&2
exit 1
fi
if ! command -v node >/dev/null 2>&1; then
echo "Node.js ≥ 18 is required but was not found. Please install it." >&2
exit 1
fi
NODE_MAJOR="$(node -e "console.log(parseInt(process.versions.node.split('.')[0], 10))")"
if [ "${NODE_MAJOR}" -lt 18 ]; then
echo "Node.js ≥ 18 is required (for ESM and global fetch). Current: $(node -v)" >&2
exit 1
fi
QUANT_ORG_NAME="${QUANT_ORG_NAME:-salsa-digital}"
🤖 Prompt for AI Agents
In scripts/quant/download-db.sh around lines 5 to 10, the script may later run a
Node.js ESM script that requires Node >= 18 for top-level await and global
fetch, but there is no preflight check; add a guard that verifies the node
executable exists and that its major version is >= 18 (parse `node --version`),
print a clear error to stderr instructing to install/upgrade Node.js if the
check fails, and exit non‑zero; keep the existing qc check and ensure the new
Node check runs before any Node-based operations.


if ! qc org select "${QUANT_ORG_NAME}" >/dev/null 2>&1; then
  echo "Failed to select Quant organisation '${QUANT_ORG_NAME}'. Run \`qc org list\` to verify access." >&2
  exit 1
fi

echo "Downloading database from Quant Cloud. If you need Lagoon, run \`ahoy download-db-lagoon\`."

QUANT_APP_NAME="${QUANT_APP_NAME:-${LAGOON_PROJECT:-qld-bsc}}"
# QUANT_APP_NAME is the canonical Quant application identifier; falls back to LAGOON_PROJECT for legacy workflows.
QUANT_ENVIRONMENT="${QUANT_ENVIRONMENT:-production}"
# Backward compatibility: expose LAGOON_PROJECT for legacy tooling until everything is Quant-aware.
if [ -z "${LAGOON_PROJECT:-}" ]; then
export LAGOON_PROJECT="${QUANT_APP_NAME}"
fi

DREVOPS_DB_DIR="${DREVOPS_DB_DIR:-./.data}"
DREVOPS_DB_FILE="${DREVOPS_DB_FILE:-db.sql}"
QUANT_DOWNLOAD_DIR="${QUANT_DOWNLOAD_DIR:-./downloads}"
QUANT_BACKUP_POLL_INTERVAL="${QUANT_BACKUP_POLL_INTERVAL:-15}"
QUANT_BACKUP_TIMEOUT="${QUANT_BACKUP_TIMEOUT:-900}"
QUANT_BACKUP_DESCRIPTION="${QUANT_BACKUP_DESCRIPTION:-Database backup triggered by ahoy download-db on $(date -u +'%Y-%m-%dT%H:%M:%SZ')}"

mkdir -p "${DREVOPS_DB_DIR}" "${QUANT_DOWNLOAD_DIR}"

QC_BIN_REALPATH="$(python3 - <<'PY'
import os
import shutil
path = shutil.which('qc')
if not path:
    raise SystemExit('qc binary not found')
print(os.path.realpath(path))
PY
)"
QUANT_CLI_MODULE_DIR="$(dirname "${QC_BIN_REALPATH}")"
export QUANT_CLI_MODULE_DIR
export QUANT_APP_NAME QUANT_ENVIRONMENT QUANT_DOWNLOAD_DIR DREVOPS_DB_DIR DREVOPS_DB_FILE
export QUANT_BACKUP_POLL_INTERVAL QUANT_BACKUP_TIMEOUT QUANT_BACKUP_DESCRIPTION

NODE_OUTPUT="$(node --input-type=module <<'NODE'
import fs from 'fs';
import path from 'path';
import { pipeline } from 'stream/promises';
import { createGunzip } from 'zlib';
import { pathToFileURL } from 'url';
const hasGzipMagicNumber = (filePath) => {
let fd;
try {
fd = fs.openSync(filePath, 'r');
const buffer = Buffer.alloc(2);
const bytesRead = fs.readSync(fd, buffer, 0, 2, 0);
return bytesRead === 2 && buffer[0] === 0x1f && buffer[1] === 0x8b;
} catch (error) {
return false;
} finally {
if (fd !== undefined) {
fs.closeSync(fd);
}
}
};
const moduleDir = process.env.QUANT_CLI_MODULE_DIR;
if (!moduleDir) {
console.error('[quant] QUANT_CLI_MODULE_DIR not set. Cannot proceed.');
process.exit(1);
}
const toNumber = (value, fallback) => {
const num = Number(value);
return Number.isFinite(num) && num > 0 ? num : fallback;
};
const pollIntervalSec = toNumber(process.env.QUANT_BACKUP_POLL_INTERVAL, 15);
const timeoutSec = toNumber(process.env.QUANT_BACKUP_TIMEOUT, 900);
const downloadDir = process.env.QUANT_DOWNLOAD_DIR || './downloads';
const decompressDir = process.env.DREVOPS_DB_DIR || './.data';
const decompressFile = process.env.DREVOPS_DB_FILE || 'db.sql';
const appName = process.env.QUANT_APP_NAME || process.env.QUANT_APPLICATION || process.env.LAGOON_PROJECT || '';
const envName = process.env.QUANT_ENVIRONMENT || '';
const description = process.env.QUANT_BACKUP_DESCRIPTION || `Database backup triggered by ahoy download-db on ${new Date().toISOString()}`;
const { ApiClient } = await import(pathToFileURL(path.join(moduleDir, 'utils/api.js')).href);
const client = await ApiClient.create();
const orgId = client.defaultOrganizationId;
const appId = appName || client.defaultApplicationId;
const envId = envName || client.defaultEnvironmentId;
if (!orgId || !appId || !envId) {
console.error('[quant] Missing organization, application, or environment context. Use `qc login` and ensure defaults are set.');
process.exit(1);
}
console.error(`[quant] Creating database backup for app "${appId}" environment "${envId}"...`);
const createResp = await client.backupManagementApi.createBackup(orgId, appId, envId, 'database', { description });
const backup = createResp.body || {};
const backupId = backup.id || backup.backupId;
if (!backupId) {
console.error('[quant] API did not return a backup ID.');
process.exit(1);
}
const normalizeStatus = (value) => (value || '').toLowerCase();
let lastStatus = normalizeStatus(backup.status) || 'requested';
console.error(`[quant] Backup requested (ID: ${backupId}). Initial status: ${lastStatus}.`);
const pollIntervalMs = Math.max(5, pollIntervalSec) * 1000;
const timeoutMs = Math.max(pollIntervalMs, timeoutSec * 1000);
const start = Date.now();
while (true) {
const listResp = await client.backupManagementApi.listBackups(orgId, appId, envId, 'database');
const backups = listResp.body?.backups ?? [];
const current = backups.find((b) => (b.backupId || b.id) === backupId);
if (current) {
const status = normalizeStatus(current.status);
if (status !== lastStatus) {
console.error(`[quant] Backup status: ${status}`);
lastStatus = status;
}
if (status === 'completed') {
console.error('[quant] Backup completed.');
break;
}
if (status === 'failed') {
console.error('[quant] Backup failed.');
process.exit(1);
}
} else {
console.error('[quant] Backup not yet visible in list; waiting...');
}
if (Date.now() - start > timeoutMs) {
console.error(`[quant] Backup did not complete within ${Math.round(timeoutMs / 1000)} seconds.`);
process.exit(1);
}
await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
}
console.error('[quant] Requesting download URL...');
const downloadResp = await client.backupManagementApi.downloadBackup(orgId, appId, envId, 'database', backupId);
const downloadData = downloadResp.body || {};
const downloadUrl = downloadData.downloadUrl;
const filename = downloadData.filename || `${backupId}.sql.gz`;
if (!downloadUrl) {
console.error('[quant] API did not return a download URL.');
process.exit(1);
}
const outputDir = path.resolve(downloadDir);
fs.mkdirSync(outputDir, { recursive: true });
const downloadPath = path.join(outputDir, filename);
console.error(`[quant] Downloading backup to ${downloadPath}...`);
const response = await fetch(downloadUrl);
if (!response.ok || !response.body) {
console.error(`[quant] Failed to download backup: HTTP ${response.status}`);
process.exit(1);
}
const fileStream = fs.createWriteStream(downloadPath);
await pipeline(response.body, fileStream);
console.error('[quant] Download complete.');
const stats = fs.statSync(downloadPath);
const decompressedPath = path.resolve(decompressDir, decompressFile);
fs.mkdirSync(path.dirname(decompressedPath), { recursive: true });
fs.rmSync(decompressedPath, { force: true });
const lowerCaseFilename = filename.toLowerCase();
const backupIsLikelyGzip = lowerCaseFilename.endsWith('.gz') || lowerCaseFilename.endsWith('.gzip') || hasGzipMagicNumber(downloadPath);
if (backupIsLikelyGzip) {
console.error(`[quant] Decompressing backup to ${decompressedPath}...`);
await pipeline(
fs.createReadStream(downloadPath),
createGunzip(),
fs.createWriteStream(decompressedPath)
);
console.error('[quant] Decompression complete.');
} else {
console.error(`[quant] Backup is already uncompressed; copying to ${decompressedPath}...`);
await pipeline(fs.createReadStream(downloadPath), fs.createWriteStream(decompressedPath));
console.error('[quant] Copy complete.');
}
const toShellValue = (value) => {
if (value === undefined || value === null) {
return '';
}
if (typeof value === 'number') {
return String(value);
}
return JSON.stringify(value);
};
console.log(`BACKUP_ID=${toShellValue(backupId)}`);
console.log(`DOWNLOAD_PATH=${toShellValue(downloadPath)}`);
console.log(`DECOMPRESSED_PATH=${toShellValue(decompressedPath)}`);
console.log(`DOWNLOAD_SIZE_BYTES=${toShellValue(stats.size)}`);
NODE
)"
if [ -z "${NODE_OUTPUT}" ]; then
echo "Failed to download backup from Quant Cloud." >&2
exit 1
fi

eval "${NODE_OUTPUT}"

echo "Quant backup ${BACKUP_ID} downloaded to ${DOWNLOAD_PATH}"
echo "Database decompressed to ${DECOMPRESSED_PATH}"