Merged
43 changes: 0 additions & 43 deletions .github/workflows/test-all.yml

This file was deleted.

34 changes: 34 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,34 @@
name: Test Modules
on:
  - push

env:
  CURRENT_WORKING_ENGINE_REF: main

jobs:
  build:
    runs-on: ubuntu-20.04
    timeout-minutes: 5
    steps:
      - name: Checkout opengb-modules
        uses: actions/checkout@v4
        with:
          path: opengb-modules

      - name: Checkout opengb
        uses: actions/checkout@v4
        with:
          repository: rivet-gg/opengb
          path: opengb

      - name: Install Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: "1.44.1"

      - name: Install OpenGB
        run: cd opengb/ && deno task cli:install

      - name: Test Modules
        run: cd opengb-modules/tests/basic && opengb test --strict-schemas --force-deploy-migrations

3 changes: 1 addition & 2 deletions modules/tokens/scripts/extend.ts
@@ -1,6 +1,5 @@
import { ScriptContext } from "../module.gen.ts";
-import { TokenWithSecret } from "../utils/types.ts";
-import { tokenFromRow } from "../utils/types.ts";
+import { TokenWithSecret, tokenFromRow } from "../utils/types.ts";

export interface Request {
  token: string;
14 changes: 14 additions & 0 deletions modules/uploads/config.ts
@@ -0,0 +1,14 @@
import { UploadSize } from "./utils/data_size.ts";

export interface Config {
  maxUploadSize?: UploadSize;
  maxMultipartUploadSize?: UploadSize;
  maxFilesPerUpload?: number;
  defaultMultipartChunkSize?: UploadSize;
}

export const DEFAULT_MAX_FILES_PER_UPLOAD = 10;

export const DEFAULT_MAX_UPLOAD_SIZE: UploadSize = "30mib";
export const DEFAULT_MAX_MULTIPART_UPLOAD_SIZE: UploadSize = "10gib";
export const DEFAULT_MULTIPART_CHUNK_SIZE: UploadSize = "10mib";
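
Note: `scripts/complete.ts` later in this diff calls `getConfig(ctx.userConfig)` from `utils/config_defaults.ts`, a file not included here. A minimal sketch of what that helper could look like, assuming it fills in the defaults above and resolves S3 settings from environment variables (the env variable names and the `S3Config` shape are assumptions, not the module's actual code):

// Hypothetical sketch of utils/config_defaults.ts -- the real file is not part
// of this diff. It merges the optional user config with the defaults above and
// resolves S3 settings from environment variables, which is presumably where
// the `s3_not_configured` error originates.
import { RuntimeError } from "../module.gen.ts";
import {
  Config,
  DEFAULT_MAX_FILES_PER_UPLOAD,
  DEFAULT_MAX_MULTIPART_UPLOAD_SIZE,
  DEFAULT_MAX_UPLOAD_SIZE,
  DEFAULT_MULTIPART_CHUNK_SIZE,
} from "../config.ts";

export interface S3Config {
  endpoint: string;
  region: string;
  bucket: string;
  accessKeyId: string;
  secretAccessKey: string;
}

export function getConfig(
  userConfig: Config,
): Required<Config> & { s3: S3Config } {
  // Read S3 connection details from the environment (variable names assumed)
  const endpoint = Deno.env.get("S3_ENDPOINT");
  const region = Deno.env.get("S3_REGION");
  const bucket = Deno.env.get("S3_BUCKET");
  const accessKeyId = Deno.env.get("S3_ACCESS_KEY_ID");
  const secretAccessKey = Deno.env.get("S3_SECRET_ACCESS_KEY");
  if (!endpoint || !region || !bucket || !accessKeyId || !secretAccessKey) {
    throw new RuntimeError("s3_not_configured");
  }

  // Fall back to the module defaults for any unset user config value
  return {
    maxUploadSize: userConfig.maxUploadSize ?? DEFAULT_MAX_UPLOAD_SIZE,
    maxMultipartUploadSize: userConfig.maxMultipartUploadSize ??
      DEFAULT_MAX_MULTIPART_UPLOAD_SIZE,
    maxFilesPerUpload: userConfig.maxFilesPerUpload ??
      DEFAULT_MAX_FILES_PER_UPLOAD,
    defaultMultipartChunkSize: userConfig.defaultMultipartChunkSize ??
      DEFAULT_MULTIPART_CHUNK_SIZE,
    s3: { endpoint, region, bucket, accessKeyId, secretAccessKey },
  };
}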
27 changes: 27 additions & 0 deletions modules/uploads/db/migrations/20240610050140_init/migration.sql
@@ -0,0 +1,27 @@
-- CreateTable
CREATE TABLE "Upload" (
    "id" UUID NOT NULL,
    "metadata" JSONB,
    "bucket" TEXT NOT NULL,
    "contentLength" BIGINT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "completedAt" TIMESTAMP(3),
    "deletedAt" TIMESTAMP(3),

    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Files" (
    "uploadId" UUID NOT NULL,
    "multipartUploadId" TEXT,
    "path" TEXT NOT NULL,
    "mime" TEXT,
    "contentLength" BIGINT NOT NULL,

    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
);

-- AddForeignKey
ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
3 changes: 3 additions & 0 deletions modules/uploads/db/migrations/migration_lock.toml
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"
33 changes: 33 additions & 0 deletions modules/uploads/db/schema.prisma
@@ -0,0 +1,33 @@
// Do not modify this `datasource` block
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model Upload {
  id       String @id @default(uuid()) @db.Uuid
  metadata Json?

  bucket        String
  contentLength BigInt

  createdAt   DateTime  @default(now())
  updatedAt   DateTime  @updatedAt
  completedAt DateTime?
  deletedAt   DateTime?

  files Files[] @relation("Files")
}

model Files {
  uploadId String @db.Uuid
  upload   Upload @relation("Files", fields: [uploadId], references: [id])

  multipartUploadId String?

  path          String
  mime          String?
  contentLength BigInt

  @@id([uploadId, path])
}
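
Note: the scripts in this diff read these Prisma rows and return them through `getKey` and `prismaToOutputWithFiles` from `utils/types.ts`, which is not shown here. A hedged sketch of those helpers, assuming the bucket key is namespaced by upload ID and that `BigInt`/`Date` columns are converted to strings because neither survives JSON serialization (both assumptions, not the module's actual code):

// Hypothetical sketch of helpers from utils/types.ts -- not part of this diff.

// Bucket key layout assumed to namespace each file under its upload ID.
export function getKey(uploadId: string, path: string): string {
  return `${uploadId}/${path}`;
}

// Prisma returns BigInt for contentLength and Date for timestamps; the output
// type is assumed to carry strings instead so the response can be serialized.
export function prismaToOutputWithFiles(
  upload: {
    id: string;
    metadata: unknown;
    bucket: string;
    contentLength: bigint;
    createdAt: Date;
    updatedAt: Date;
    completedAt: Date | null;
    files: { path: string; mime: string | null; contentLength: bigint }[];
  },
) {
  return {
    ...upload,
    contentLength: upload.contentLength.toString(),
    createdAt: upload.createdAt.toISOString(),
    updatedAt: upload.updatedAt.toISOString(),
    completedAt: upload.completedAt?.toISOString() ?? null,
    files: upload.files.map((file) => ({
      ...file,
      contentLength: file.contentLength.toString(),
    })),
  };
}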
63 changes: 63 additions & 0 deletions modules/uploads/module.json
@@ -0,0 +1,63 @@
{
  "scripts": {
    "prepare": {
      "name": "Prepare Upload",
      "description": "Prepare an upload batch for data transfer"
    },
    "complete": {
      "name": "Complete Upload",
      "description": "Alert the module that the upload has been completed"
    },
    "get": {
      "name": "Get Upload Metadata",
      "description": "Get the metadata (including contained files) for specified upload IDs"
    },
    "get_public_file_urls": {
      "name": "Get File Link",
      "description": "Get presigned download links for each of the specified files"
    },
    "delete": {
      "name": "Delete Upload",
      "description": "Removes the upload and deletes the files from the bucket"
    }
  },
  "errors": {
    "no_files": {
      "name": "No Files Provided",
      "description": "An upload must have at least 1 file"
    },
    "too_many_files": {
      "name": "Too Many Files Provided",
      "description": "There is a limit to how many files can be put into a single upload (see config)"
    },
    "duplicate_paths": {
      "name": "Duplicate Paths Provided",
      "description": "An upload cannot contain 2 files with the same paths (see `cause` for offending paths)"
    },
    "size_limit_exceeded": {
      "name": "Combined Size Limit Exceeded",
      "description": "There is a maximum total size per upload (see config)"
    },
    "upload_not_found": {
      "name": "Upload Not Found",
      "description": "The provided upload ID didn't match any known existing uploads"
    },
"upload_already_completed": {
"name": "Upload Already completed",
"description": "\\`complete\\` was already called on this upload"
},
"s3_not_configured": {
"name": "S3 Not Configured",
"description": "The S3 bucket is not configured (missing env variables)"
},
"too_many_chunks": {
"name": "Possibility Of Too Many Chunks",
"description": "AWS S3 has a limit on the number of parts that can be uploaded in a\nmultipart upload. This limit is 10,000 parts. If the number of chunks\nrequired to upload the maximum multipart upload size exceeds this limit,\nany operation will preemptively throw this error.\n"
},
"multipart_upload_completion_fail": {
"name": "Multipart Upload Completion Failure",
"description": "The multipart upload failed to complete (see `cause` for more information)"
}
},
"dependencies": {}
}
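
Note: the `too_many_chunks` description implies a concrete arithmetic guard: S3 allows at most 10,000 parts per multipart upload, so the configured maximum multipart upload size divided by the chunk size must not exceed that cap. A self-contained sketch of that check, assuming a size-string parser like the one `utils/data_size.ts` presumably provides (the helper and error wording here are illustrative only, not the module's actual code):

// Hypothetical, self-contained sketch of the arithmetic behind the
// `too_many_chunks` error; not the module's actual implementation.

// S3 rejects multipart uploads with more than 10,000 parts.
const S3_MAX_PARTS = 10_000n;

// Assumed parser for sizes like "30mib" or "10gib" (the real one lives in
// utils/data_size.ts, which is not part of this diff).
function sizeToBytes(size: string): bigint {
  const match = /^(\d+)(b|kib|mib|gib)$/.exec(size);
  if (!match) throw new Error(`unrecognized size: ${size}`);
  const units: Record<string, bigint> = {
    b: 1n,
    kib: 1024n,
    mib: 1024n ** 2n,
    gib: 1024n ** 3n,
  };
  return BigInt(match[1]) * units[match[2]];
}

// Throws if uploading `maxMultipartUploadSize` in chunks of `chunkSize`
// would require more parts than S3 allows.
export function assertChunkCountOk(
  maxMultipartUploadSize: string,
  chunkSize: string,
) {
  const maxBytes = sizeToBytes(maxMultipartUploadSize);
  const chunkBytes = sizeToBytes(chunkSize);

  // Ceiling division: the final chunk may be smaller than chunkBytes.
  const chunksNeeded = (maxBytes + chunkBytes - 1n) / chunkBytes;
  if (chunksNeeded > S3_MAX_PARTS) {
    throw new Error("too_many_chunks: upload would exceed 10,000 S3 parts");
  }
}

// With the defaults above, 10 GiB / 10 MiB = 1,024 chunks, well under the limit.
assertChunkCountOk("10gib", "10mib");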
136 changes: 136 additions & 0 deletions modules/uploads/scripts/complete.ts
@@ -0,0 +1,136 @@
import { RuntimeError, ScriptContext } from "../module.gen.ts";
import {
  completeMultipartUpload,
  getMultipartUploadParts,
  keyExists,
} from "../utils/bucket.ts";
import { getConfig } from "../utils/config_defaults.ts";
import { getKey, prismaToOutputWithFiles, Upload } from "../utils/types.ts";

export interface Request {
  uploadId: string;
}

export interface Response {
  upload: Upload;
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  const config = getConfig(ctx.userConfig);

  const newUpload = await ctx.db.$transaction(async (db) => {
    // Find the upload by ID
    const upload = await db.upload.findFirst({
      where: {
        id: req.uploadId,
      },
      select: {
        id: true,
        metadata: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    // Error if the upload wasn't prepared
    if (!upload) {
      throw new RuntimeError(
        "upload_not_found",
        {
          meta: { uploadId: req.uploadId },
        },
      );
    }

    // Error if `complete` was already called with this ID
    if (upload.completedAt !== null) {
      throw new RuntimeError(
        "upload_already_completed",
        {
          meta: { uploadId: req.uploadId },
        },
      );
    }

    // Check with S3 to see if the files were uploaded
    const fileExistencePromises = upload.files.map(
      async (file) => {
        // If the file was uploaded in parts, complete the multipart upload
        if (file.multipartUploadId) {
          try {
            const parts = await getMultipartUploadParts(
              config.s3,
              getKey(upload.id, file.path),
              file.multipartUploadId,
            );
            if (parts.length === 0) return false;

            await completeMultipartUpload(
              config.s3,
              getKey(upload.id, file.path),
              file.multipartUploadId,
              parts,
            );
          } catch (e) {
            throw new RuntimeError(
              "multipart_upload_completion_fail",
              { cause: e },
            );
          }

          return true;
        } else {
          // Check if the file exists
          return await keyExists(config.s3, getKey(upload.id, file.path));
        }
      },
    );
    const fileExistence = await Promise.all(fileExistencePromises);
    const filesAllExist = fileExistence.every(Boolean);
    if (!filesAllExist) {
      const missingFiles = upload.files.filter((_, i) => !fileExistence[i]);
      throw new RuntimeError(
        "files_not_uploaded",
        {
          meta: {
            uploadId: req.uploadId,
            missingFiles: missingFiles.map((file) => file.path),
          },
        },
      );
    }

    // Update the upload to mark it as completed
    const completedUpload = await db.upload.update({
      where: {
        id: req.uploadId,
      },
      data: {
        completedAt: new Date(),
      },
      select: {
        id: true,
        metadata: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    return completedUpload;
  });

  return {
    upload: prismaToOutputWithFiles(newUpload),
  };
}