Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,4 @@ WORKDIR /app

COPY --from=builder /app/.output ./

CMD node /app/server/index.mjs
CMD ["node", "/app/server/index.mjs"]
85 changes: 34 additions & 51 deletions lib/db/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@ import type { DatabaseDriverName } from '~/lib/db/drivers'
import cluster from 'node:cluster'

import { hash } from 'node:crypto'
import { createSingletonPromise } from '@antfu/utils'
import { Kysely, Migrator } from 'kysely'
import { getDatabaseDriver } from '~/lib/db/drivers'
import { migrations } from '~/lib/db/migrations'
import { ENV } from '~/lib/env'

import { ENV } from '~/lib/env'
import { logger } from '~/lib/logger'

export interface CacheKeysTable {
Expand All @@ -23,12 +24,10 @@ export interface UploadsTable {
key: string
version: string
id: string
driver_upload_id: string
}
export interface UploadPartsTable {
upload_id: string
part_number: number
e_tag: string | null
}

export interface MetaTable {
Expand All @@ -43,62 +42,46 @@ export interface Database {
meta: MetaTable
}

let _db: Kysely<Database>

let initializationPromise: Promise<void> | undefined
export async function initializeDatabase() {
if (initializationPromise) return initializationPromise

// eslint-disable-next-line unicorn/consistent-function-scoping
const init = async () => {
const driverName = ENV.DB_DRIVER
const driverSetup = getDatabaseDriver(driverName)
if (!driverSetup) {
logger.error(`No database driver found for ${driverName}`)
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1)
}
if (cluster.isPrimary) logger.info(`Using database driver: ${driverName}`)
export const useDB = createSingletonPromise(async () => {
const driverName = ENV.DB_DRIVER
const driverSetup = getDatabaseDriver(driverName)
if (!driverSetup) {
logger.error(`No database driver found for ${driverName}`)
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1)
}
if (cluster.isPrimary) logger.info(`Using database driver: ${driverName}`)

const driver = await driverSetup()
const driver = await driverSetup()

_db = new Kysely<Database>({
dialect: driver,
})
const db = new Kysely<Database>({
dialect: driver,
})

if (cluster.isPrimary) {
logger.info('Migrating database...')
const migrator = new Migrator({
db: _db,
provider: {
async getMigrations() {
return migrations(driverName as DatabaseDriverName)
},
if (cluster.isPrimary) {
logger.info('Migrating database...')
const migrator = new Migrator({
db,
provider: {
async getMigrations() {
return migrations(driverName as DatabaseDriverName)
},
})
const { error, results } = await migrator.migrateToLatest()
if (error) {
logger.error('Database migration failed', error)
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1)
}
logger.debug('Migration results', results)
logger.success('Database migrated')
},
})
const { error, results } = await migrator.migrateToLatest()
if (error) {
logger.error('Database migration failed', error)
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1)
}
logger.debug('Migration results', results)
logger.success('Database migrated')
}

initializationPromise = init()
return initializationPromise
}

export async function useDB() {
if (!_db) {
await initializeDatabase()
}
return _db
}
return db
})

type DB = typeof _db
type DB = Awaited<ReturnType<typeof useDB>>

/**
* @see https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
Expand Down
6 changes: 6 additions & 0 deletions lib/db/migrations.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,5 +70,11 @@ export function migrations(dbType: DatabaseDriverName) {
await db.schema.dropTable('meta').ifExists().execute()
},
},
$3_remove_unused_columns: {
async up(db) {
await db.schema.alterTable('uploads').dropColumn('driver_upload_id').execute()
await db.schema.alterTable('upload_parts').dropColumn('e_tag').execute()
},
},
} satisfies Record<string, Migration>
}
87 changes: 50 additions & 37 deletions lib/storage/defineStorageDriver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,53 +2,66 @@

import type { Readable } from 'node:stream'
import type { z } from 'zod'
import path from 'node:path'

import { formatZodError } from '~/lib/env'
import { logger } from '~/lib/logger'

interface PartDetails {
partNumber: number
eTag?: string
}
export abstract class StorageDriver {
static baseFolder = 'gh-actions-cache'
static uploadFolder = '.uploads'
baseFolderPrefix: string | undefined

constructor(baseFolderPrefix?: string) {
this.baseFolderPrefix = baseFolderPrefix
}

addBaseFolderPrefix(objectName: string) {
return path.join(this.baseFolderPrefix ?? '', StorageDriver.baseFolder, objectName)
}

getUploadFolderPrefix(uploadId: string) {
return path.join(
this.baseFolderPrefix ?? '',
StorageDriver.baseFolder,
StorageDriver.uploadFolder,
uploadId,
)
}

addUploadFolderPrefix(opts: { uploadId: string; objectName: string }) {
return path.join(this.getUploadFolderPrefix(opts.uploadId), opts.objectName)
}

export interface StorageDriver {
initiateMultiPartUpload: (opts: { objectName: string; totalSize: number }) => Promise<string>
uploadPart: (opts: {
objectName: string
getUploadPartObjectName(opts: { uploadId: string; partNumber: number }) {
return this.addUploadFolderPrefix({
uploadId: opts.uploadId,
objectName: `part_${opts.partNumber}`,
})
}

abstract delete(objectNames: string[]): Promise<void>
abstract createReadStream(objectName: string): Promise<ReadableStream | Readable>
abstract createDownloadUrl?(objectName: string): Promise<string>
abstract uploadPart(opts: {
uploadId: string
partNumber: number
data: ReadableStream
chunkStart: number
}) => Promise<{
eTag: string | null
}>
completeMultipartUpload: (opts: {
objectName: string
}): Promise<void>
abstract completeMultipartUpload(opts: {
finalOutputObjectName: string
uploadId: string
parts: PartDetails[]
}) => Promise<void>
abortMultipartUpload: (opts: { objectName: string; uploadId: string }) => Promise<void>
download: (opts: { objectName: string }) => Promise<ReadableStream | Readable>
createDownloadUrl?: (opts: { objectName: string }) => Promise<string>
delete: (opts: { objectNames: string[] }) => Promise<void>
partNumbers: number[]
}): Promise<void>
abstract cleanupMultipartUpload(uploadId: string): Promise<void>
}

interface DefineStorageDriverOptions<EnvSchema extends z.ZodTypeAny> {
envSchema: EnvSchema
setup: (options: z.output<EnvSchema>) => Promise<StorageDriver> | StorageDriver
}
export function defineStorageDriver<EnvSchema extends z.ZodTypeAny>(
options: DefineStorageDriverOptions<EnvSchema>,
) {
return () => {
const env = options.envSchema.safeParse(process.env)
if (!env.success) {
logger.error(`Invalid environment variables:\n${formatZodError(env.error)}`)
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1)
}

const driver = options.setup(env.data)
return driver instanceof Promise ? driver : Promise.resolve(driver)
/**
 * Validates `process.env` against the given Zod schema and returns the
 * typed result.
 *
 * On validation failure this logs every schema violation (formatted via
 * `formatZodError`) and terminates the process with exit code 1 — callers
 * never observe a failure value; an invalid environment is treated as
 * fatal at startup.
 *
 * @param schema - Zod schema describing the expected environment variables.
 * @returns The parsed environment, typed as `z.output<Schema>`.
 */
export function parseEnv<Schema extends z.ZodTypeAny>(schema: Schema) {
  const env = schema.safeParse(process.env)
  if (!env.success) {
    logger.error(`Invalid environment variables:\n${formatZodError(env.error)}`)
    // eslint-disable-next-line unicorn/no-process-exit
    process.exit(1)
  }
  // NOTE(review): cast presumably compensates for TS not narrowing
  // `env` to the success branch after `process.exit` — confirm whether
  // it is still required.
  return env.data as z.output<Schema>
}
Loading