diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1139c935b..a58511078 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: options: --privileged services: postgres: - image: postgres:13.11-alpine + image: postgres:16-alpine env: POSTGRES_DB: spi_test POSTGRES_USER: spi_test @@ -61,7 +61,7 @@ jobs: # runs-on: macOS-latest # services: # postgres: - # image: postgres:13.11 + # image: postgres:16-alpine # env: # POSTGRES_DB: spi_dev # POSTGRES_USER: spi_dev diff --git a/LOCAL_DEVELOPMENT_SETUP.md b/LOCAL_DEVELOPMENT_SETUP.md index c018f266c..b25af8007 100644 --- a/LOCAL_DEVELOPMENT_SETUP.md +++ b/LOCAL_DEVELOPMENT_SETUP.md @@ -40,8 +40,6 @@ Close the scheme editor and run the application by selecting "Run" from the Xcod When working locally, it's helpful to have a database with pre-populated data from the live system. [Talk to us on Discord](https://discord.gg/vQRb6KkYRw), and we'll supply you with a recent database dump that you can load with `./scripts/load-db.sh`. -**Note:** Running the `load-db.sh` script requires that the Postgres command line tools are available on your system. The easiest way to get these is with [brew](https://brew.sh/) by running `brew install postgresql`. - ### Setup the Front End Once the back end is set up and the server is running, the next step is to set up the front end to serve the CSS and JavaScript. We use [esbuild](https://esbuild.github.io) to build our front end files. However, you do not need to install Node or any other tools locally unless you are doing extensive work with the front-end files in this project. For small changes or to get started with front-end changes in this project, use our Docker-based scripts. 
diff --git a/Makefile b/Makefile index fcd04add7..8667a5c69 100644 --- a/Makefile +++ b/Makefile @@ -95,8 +95,11 @@ analyze: db-up: db-up-dev db-up-test db-up-dev: - docker run --name spi_dev -e POSTGRES_DB=spi_dev -e POSTGRES_USER=spi_dev -e POSTGRES_PASSWORD=xxx -p 6432:5432 -d postgres:13.11-alpine + docker run --name spi_dev -e POSTGRES_DB=spi_dev -e POSTGRES_USER=spi_dev -e POSTGRES_PASSWORD=xxx -p 6432:5432 -d postgres:16-alpine +# Keep test db on postgres:13 for now, to make local testing faster. See +# https://github.com/SwiftPackageIndex/SwiftPackageIndex-Server/issues/3360#issuecomment-2331103211 +# for details db-up-test: docker run --name spi_test \ -e POSTGRES_DB=spi_test \ @@ -106,19 +109,7 @@ db-up-test: --tmpfs /pgdata:rw,noexec,nosuid,size=1024m \ -p 5432:5432 \ -d \ - postgres:13.11-alpine - -db-up-test-log-statement: - docker run --name spi_test \ - -e POSTGRES_DB=spi_test \ - -e POSTGRES_USER=spi_test \ - -e POSTGRES_PASSWORD=xxx \ - -e PGDATA=/pgdata \ - --tmpfs /pgdata:rw,noexec,nosuid,size=1024m \ - -p 5432:5432 \ - --rm \ - postgres:13.11-alpine \ - postgres -c log_statement=all + postgres:13-alpine db-down: db-down-dev db-down-test diff --git a/Tests/AppTests/AppTestCase.swift b/Tests/AppTests/AppTestCase.swift index 303aa353d..b42611827 100644 --- a/Tests/AppTests/AppTestCase.swift +++ b/Tests/AppTests/AppTestCase.swift @@ -12,9 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. +@testable import App + +import NIOConcurrencyHelpers +import PostgresNIO import SQLKit import XCTVapor -@testable import App + class AppTestCase: XCTestCase { var app: Application! 
@@ -23,9 +27,16 @@ class AppTestCase: XCTestCase { override func setUp() async throws { try await super.setUp() app = try await setup(.testing) + + // Always start with a baseline mock environment to avoid hitting live resources + Current = .mock(eventLoop: app.eventLoopGroup.next()) + Current.setLogger(.init(label: "test", factory: { _ in logger })) - // Silence app logging - app.logger = .init(label: "noop") { _ in SwiftLogNoOpLogHandler() } + } + + func setup(_ environment: Environment) async throws -> Application { + try await Self.setupDb(environment) + return try await Self.setupApp(environment) } override func tearDown() async throws { @@ -35,6 +46,98 @@ class AppTestCase: XCTestCase { } +extension AppTestCase { + + static func setupApp(_ environment: Environment) async throws -> Application { + let app = try await Application.make(environment) + try await configure(app) + + // Silence app logging + app.logger = .init(label: "noop") { _ in SwiftLogNoOpLogHandler() } + + return app + } + + + static func setupDb(_ environment: Environment) async throws { + await DotEnvFile.load(for: environment, fileio: .init(threadPool: .singleton)) + + // Ensure DATABASE_HOST is from a restricted set of db hostnames and nothing else. + // This is a safeguard against accidental inheritance of setup in QueryPerformanceTests + // and to ensure the database resetting cannot impact any other network hosts. + let host = Environment.get("DATABASE_HOST") + precondition(["localhost", "postgres", "host.docker.internal"].contains(host), + "DATABASE_HOST must be a local db, was: \(host)") + + let testDbName = Environment.get("DATABASE_NAME")! 
+ let snapshotName = testDbName + "_snapshot" + + // Create initial db snapshot on first run + try await snapshotCreated.withValue { snapshotCreated in + if !snapshotCreated { + try await createSchema(environment, databaseName: testDbName) + try await createSnapshot(original: testDbName, snapshot: snapshotName, environment: environment) + snapshotCreated = true + } + } + + try await restoreSnapshot(original: testDbName, snapshot: snapshotName, environment: environment) + } + + + static func createSchema(_ environment: Environment, databaseName: String) async throws { + do { + try await withDatabase("postgres", .testing) { // Connect to `postgres` db in order to reset the test db + try await $0.query(PostgresQuery(unsafeSQL: "DROP DATABASE IF EXISTS \(databaseName) WITH (FORCE)")) + try await $0.query(PostgresQuery(unsafeSQL: "CREATE DATABASE \(databaseName)")) + } + + do { // Use autoMigrate to spin up the schema + let app = try await Application.make(environment) + app.logger = .init(label: "noop") { _ in SwiftLogNoOpLogHandler() } + try await configure(app) + try await app.autoMigrate() + try await app.asyncShutdown() + } + } catch { + print("Create schema failed with error: ", String(reflecting: error)) + throw error + } + } + + + static func createSnapshot(original: String, snapshot: String, environment: Environment) async throws { + do { + try await withDatabase("postgres", environment) { client in + try await client.query(PostgresQuery(unsafeSQL: "DROP DATABASE IF EXISTS \(snapshot) WITH (FORCE)")) + try await client.query(PostgresQuery(unsafeSQL: "CREATE DATABASE \(snapshot) TEMPLATE \(original)")) + } + } catch { + print("Create snapshot failed with error: ", String(reflecting: error)) + throw error + } + } + + + static func restoreSnapshot(original: String, snapshot: String, environment: Environment) async throws { + // delete db and re-create from snapshot + do { + try await withDatabase("postgres", environment) { client in + try await 
client.query(PostgresQuery(unsafeSQL: "DROP DATABASE IF EXISTS \(original) WITH (FORCE)")) + try await client.query(PostgresQuery(unsafeSQL: "CREATE DATABASE \(original) TEMPLATE \(snapshot)")) + } + } catch { + print("Restore snapshot failed with error: ", String(reflecting: error)) + throw error + } + } + + + static let snapshotCreated = ActorIsolated(false) + +} + + extension AppTestCase { func renderSQL(_ builder: SQLSelectBuilder) -> String { renderSQL(builder.query) @@ -69,3 +172,29 @@ extension AppTestCase { } } } + + +private func connect(to databaseName: String, _ environment: Environment) async throws -> PostgresClient { + await DotEnvFile.load(for: environment, fileio: .init(threadPool: .singleton)) + let host = Environment.get("DATABASE_HOST")! + let port = Environment.get("DATABASE_PORT").flatMap(Int.init)! + let username = Environment.get("DATABASE_USERNAME")! + let password = Environment.get("DATABASE_PASSWORD")! + + let config = PostgresClient.Configuration(host: host, port: port, username: username, password: password, database: databaseName, tls: .disable) + + return .init(configuration: config) +} + + +private func withDatabase(_ databaseName: String, _ environment: Environment, _ query: @escaping (PostgresClient) async throws -> Void) async throws { + let client = try await connect(to: databaseName, environment) + try await withThrowingTaskGroup(of: Void.self) { taskGroup in + taskGroup.addTask { await client.run() } + + try await query(client) + + taskGroup.cancelAll() + } +} + diff --git a/Tests/AppTests/Util.swift b/Tests/AppTests/Util.swift index f97ce98ba..669ce7a2e 100644 --- a/Tests/AppTests/Util.swift +++ b/Tests/AppTests/Util.swift @@ -23,67 +23,6 @@ import NIOConcurrencyHelpers // MARK: - Test helpers -private let _schemaCreated = NIOLockedValueBox(false) - -func setup(_ environment: Environment, resetDb: Bool = true) async throws -> Application { - let app = try await Application.make(environment) - let host = try await configure(app) 
- - // Ensure `.testing` refers to "postgres" or "localhost" - precondition(["localhost", "postgres", "host.docker.internal"].contains(host), - ".testing must be a local db, was: \(host)") - - app.logger.logLevel = Environment.get("LOG_LEVEL").flatMap(Logger.Level.init(rawValue:)) ?? .warning - - if !(_schemaCreated.withLockedValue { $0 }) { - // ensure we create the schema when running the first test - try await app.autoMigrate() - _schemaCreated.withLockedValue { $0 = true } - } - if resetDb { try await _resetDb(app) } - - // Always start with a baseline mock environment to avoid hitting live resources - Current = .mock(eventLoop: app.eventLoopGroup.next()) - - return app -} - - -private let tableNamesCache: NIOLockedValueBox<[String]?> = .init(nil) - -func _resetDb(_ app: Application) async throws { - guard let db = app.db as? SQLDatabase else { - fatalError("Database must be an SQLDatabase ('as? SQLDatabase' must succeed)") - } - - guard let tables = tableNamesCache.withLockedValue({ $0 }) else { - struct Row: Decodable { var table_name: String } - let tableNames = try await db.raw(""" - SELECT table_name FROM - information_schema.tables - WHERE - table_schema NOT IN ('pg_catalog', 'information_schema', 'public._fluent_migrations') - AND table_schema NOT LIKE 'pg_toast%' - AND table_name NOT LIKE '_fluent_%' - """) - .all(decoding: Row.self) - .map(\.table_name) - tableNamesCache.withLockedValue { $0 = tableNames } - try await _resetDb(app) - return - } - - for table in tables { - try await db.raw("TRUNCATE TABLE \(ident: table) CASCADE").run() - } - - try await RecentPackage.refresh(on: app.db) - try await RecentRelease.refresh(on: app.db) - try await Search.refresh(on: app.db) - try await Stats.refresh(on: app.db) - try await WeightedKeyword.refresh(on: app.db) -} - func fixtureString(for fixture: String) throws -> String { String(decoding: try fixtureData(for: fixture), as: UTF8.self) diff --git a/scripts/convert_to_db_dump.sh b/scripts/convert_to_db_dump.sh 
deleted file mode 100755 index d2aee517c..000000000 --- a/scripts/convert_to_db_dump.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/sh - -# Copyright Dave Verwer, Sven A. Schmidt, and other contributors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eu - -TARFILE=$1 -DUMPFILE=$2 -PG_IMAGE=postgres:13.11-alpine - -[[ -n $DATABASE_PASSWORD ]] || (echo "DATABASE_PASSWORD not set" && exit 1) -[[ -n $DB_BACKUP_DIR ]] || (echo "DB_BACKUP_DIR not set" && exit 1) -[[ -n $ENV ]] || (echo "ENV not set" && exit 1) - -# Create docker network (ignoring "already exists") in order to -# discover database at hostname "backup-db" -docker network create backup || true - -echo "Unpacking ..." - -tar xfz "$TARFILE" - -echo "Launching backup db ..." - -docker run --rm --name backup-db -d \ - -v "$PWD/db_data:/var/lib/postgresql/data" \ - --network backup \ - $PG_IMAGE - -echo "Exporting to $DUMPFILE ..." - -RETRIES=30 -until docker run --rm --name pg_dump \ - -v "$PWD":/host \ - --network backup \ - --env PGPASSWORD="$DATABASE_PASSWORD" \ - $PG_IMAGE \ - pg_dump --no-owner -Fc -f /host/"$DUMPFILE" \ - -h backup-db -U spi_"${ENV}" spi_"${ENV}" \ - || [ $RETRIES -eq 0 ]; do - echo "Waiting for postgres server, $((RETRIES-=1)) remaining attempts..." - sleep 5 - echo "Retrying ..." 
-done - -echo "Moving file" -mv "$DUMPFILE" "$DB_BACKUP_DIR" - -echo "Cleaning up" -rm "$TARFILE" -rm -rf db_data -docker rm -f backup-db diff --git a/scripts/db_backup.sh b/scripts/db_backup.sh deleted file mode 100755 index 2e91cdc7e..000000000 --- a/scripts/db_backup.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/sh - -# Copyright Dave Verwer, Sven A. Schmidt, and other contributors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -u - -TARFILE=$1 - -echo "Backing up database to $PWD/$TARFILE ..." - -docker run --rm \ - -v "$PWD":/host \ - -v spi_db_data:/db_data \ - -w /host \ - ubuntu \ - tar cfz "$TARFILE" /db_data - -echo "done." - -# don't let tar errors or warnings bubble up -exit 0 diff --git a/scripts/download-prod-db.sh b/scripts/download-prod-db.sh deleted file mode 100755 index b4b87dae0..000000000 --- a/scripts/download-prod-db.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -# Copyright Dave Verwer, Sven A. Schmidt, and other contributors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Download today's backup from the production database. -# This is the recommended way to grab the database, if you have SSH access. -scp spi@208.52.185.253:~/Desktop/DB-Backups/spi_prod_$(date +%Y-%m-%d).dump . diff --git a/scripts/load-db.sh b/scripts/load-db.sh index 8c03b521d..2d99b233a 100755 --- a/scripts/load-db.sh +++ b/scripts/load-db.sh @@ -16,11 +16,18 @@ set -eu IMPORT_FILE=$1 + +POSTGRES_IMAGE="postgres:16-alpine" + docker rm -f spi_dev -docker run --name spi_dev -e POSTGRES_DB=spi_dev -e POSTGRES_USER=spi_dev -e POSTGRES_PASSWORD=xxx -p 6432:5432 -d postgres:13.11-alpine +docker run --name spi_dev -e POSTGRES_DB=spi_dev -e POSTGRES_USER=spi_dev -e POSTGRES_PASSWORD=xxx -p 6432:5432 -d $POSTGRES_IMAGE echo "Giving Postgres a moment to launch ..." sleep 5 + echo "Creating Azure roles" -PGPASSWORD=xxx psql -h "${HOST:-localhost}" -p 6432 -U spi_dev -d spi_dev -c 'CREATE ROLE azure_pg_admin; CREATE ROLE azuresu;' +psql="docker run --rm -v $PWD:/host -w /host --network=host -e PGPASSWORD=xxx $POSTGRES_IMAGE psql" +$psql -h "${HOST:-localhost}" -p 6432 -U spi_dev -d spi_dev -c 'CREATE ROLE azure_pg_admin; CREATE ROLE azuresu;' + echo "Importing" -PGPASSWORD=xxx pg_restore --no-owner -h "${HOST:-localhost}" -p 6432 -U spi_dev -d spi_dev < "$IMPORT_FILE" +pg_restore="docker run --rm -i -v $PWD:/host -w /host --network=host -e PGPASSWORD=xxx $POSTGRES_IMAGE pg_restore" +$pg_restore --no-owner -h "${HOST:-localhost}" -p 6432 -U spi_dev -d spi_dev < "$IMPORT_FILE"