This repository was archived by the owner on Sep 17, 2024. It is now read-only.

Commit 34ef6be

Blckbrry-Pi authored and NathanFlurry committed
feat: Create uploads module
1 parent c7a6789 commit 34ef6be

File tree

21 files changed (+2615 −20 lines)


.github/workflows/test-all.yml

Lines changed: 7 additions & 16 deletions

@@ -3,41 +3,32 @@ on:
   - push
 
 env:
-  CURRENT_WORKING_ENGINE_COMMIT: aa839aee9011ece7d6a133dee984727748bd3cbf
+  CURRENT_WORKING_ENGINE_REF: main
 
 jobs:
   build:
     runs-on: ubuntu-20.04
     timeout-minutes: 5
     steps:
-      # Checkout registry repo
-      - name: Checkout registry Repo
+      - name: Checkout opengb-modules
         uses: actions/checkout@v4
         with:
           path: opengb-modules
 
-      # Get engine repo to test against
-      - name: Fetch engine repo
+      - name: Checkout opengb
         uses: actions/checkout@v4
         with:
           repository: rivet-gg/opengb
-          ssh-key: ${{ secrets.GH_DEPLOY_KEY }}
           path: opengb
 
-      # Get a version of the engine that we know works
-      - name: Checkout to working commit
-        run: cd opengb/ && git checkout $CURRENT_WORKING_ENGINE_COMMIT
-
-      # Install Deno to run OpenGB
       - name: Install Deno
         uses: denoland/setup-deno@v1
         with:
-          deno-version: "1.41.1"
+          deno-version: "1.44.1"
 
-      # Install OpenGB
       - name: Install OpenGB
         run: cd opengb/ && deno task cli:install
 
-      # Run tests on all modules in the registry
-      - name: Run Tests for all modules
-        run: cd ./opengb-modules/tests/basic && opengb test --strict-schemas --force-deploy-migrations
+      - name: Test Modules
+        run: cd opengb-modules/tests/basic && opengb test --strict-schemas --force-deploy-migrations

modules/tokens/scripts/extend.ts

Lines changed: 1 addition & 2 deletions

@@ -1,6 +1,5 @@
 import { ScriptContext } from "../module.gen.ts";
-import { TokenWithSecret } from "../utils/types.ts";
-import { tokenFromRow } from "../utils/types.ts";
+import { TokenWithSecret, tokenFromRow } from "../utils/types.ts";
 
 export interface Request {
 	token: string;

modules/uploads/config.ts

Lines changed: 14 additions & 0 deletions

@@ -0,0 +1,14 @@
+import { UploadSize } from "./utils/data_size.ts";
+
+export interface Config {
+	maxUploadSize?: UploadSize;
+	maxMultipartUploadSize?: UploadSize;
+	maxFilesPerUpload?: number;
+	defaultMultipartChunkSize?: UploadSize;
+}
+
+export const DEFAULT_MAX_FILES_PER_UPLOAD = 10;
+
+export const DEFAULT_MAX_UPLOAD_SIZE: UploadSize = "30mib";
+export const DEFAULT_MAX_MULTIPART_UPLOAD_SIZE: UploadSize = "10gib";
+export const DEFAULT_MULTIPART_CHUNK_SIZE: UploadSize = "10mib";
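
These defaults cap a plain upload at 30 MiB, a multipart upload at 10 GiB transferred in 10 MiB chunks, and a batch at 10 files. A minimal usage sketch (the `Config` shape comes from this file; the literal values, and the assumption that `UploadSize` accepts the same `<number><unit>` strings as the defaults, are illustrative):

    import { Config } from "./config.ts";

    // Hypothetical user config: raise the upload cap, lower the file count.
    const uploadsConfig: Config = {
    	maxUploadSize: "100mib",
    	maxFilesPerUpload: 4,
    	// Omitted fields fall back to the DEFAULT_* constants above.
    };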

modules/uploads/db/migrations/…/migration.sql

Lines changed: 27 additions & 0 deletions

@@ -0,0 +1,27 @@
+-- CreateTable
+CREATE TABLE "Upload" (
+    "id" UUID NOT NULL,
+    "metadata" JSONB,
+    "bucket" TEXT NOT NULL,
+    "contentLength" BIGINT NOT NULL,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+    "completedAt" TIMESTAMP(3),
+    "deletedAt" TIMESTAMP(3),
+
+    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "Files" (
+    "uploadId" UUID NOT NULL,
+    "multipartUploadId" TEXT,
+    "path" TEXT NOT NULL,
+    "mime" TEXT,
+    "contentLength" BIGINT NOT NULL,
+
+    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
+);
+
+-- AddForeignKey
+ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

modules/uploads/db/migrations/migration_lock.toml

Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
+# Please do not edit this file manually
+# It should be added in your version-control system (i.e. Git)
+provider = "postgresql"

modules/uploads/db/schema.prisma

Lines changed: 33 additions & 0 deletions

@@ -0,0 +1,33 @@
+// Do not modify this `datasource` block
+datasource db {
+	provider = "postgresql"
+	url      = env("DATABASE_URL")
+}
+
+model Upload {
+	id       String @id @default(uuid()) @db.Uuid
+	metadata Json?
+
+	bucket        String
+	contentLength BigInt
+
+	createdAt   DateTime  @default(now())
+	updatedAt   DateTime  @updatedAt
+	completedAt DateTime?
+	deletedAt   DateTime?
+
+	files Files[] @relation("Files")
+}
+
+model Files {
+	uploadId String @db.Uuid
+	upload   Upload @relation("Files", fields: [uploadId], references: [id])
+
+	multipartUploadId String?
+
+	path          String
+	mime          String?
+	contentLength BigInt
+
+	@@id([uploadId, path])
+}
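
In the `Files` model, `@@id([uploadId, path])` makes that pair the composite primary key, which is what rules out duplicate paths within one upload at the database level. A hedged sketch of addressing a single file row through the generated client (Prisma derives the compound selector name `uploadId_path` from the key fields; `ctx.db` is the client handle used by this commit's scripts; the UUID and path are examples):

    // Look up one file row by its composite primary key (uploadId, path).
    const file = await ctx.db.files.findUnique({
    	where: {
    		uploadId_path: {
    			uploadId: "11111111-2222-3333-4444-555555555555",
    			path: "images/avatar.png",
    		},
    	},
    });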

modules/uploads/module.json

Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
+{
+	"scripts": {
+		"prepare": {
+			"name": "Prepare Upload",
+			"description": "Prepare an upload batch for data transfer"
+		},
+		"complete": {
+			"name": "Complete Upload",
+			"description": "Alert the module that the upload has been completed"
+		},
+		"get": {
+			"name": "Get Upload Metadata",
+			"description": "Get the metadata (including contained files) for specified upload IDs"
+		},
+		"get_public_file_urls": {
+			"name": "Get File Link",
+			"description": "Get presigned download links for each of the specified files"
+		},
+		"delete": {
+			"name": "Delete Upload",
+			"description": "Removes the upload and deletes the files from the bucket"
+		}
+	},
+	"errors": {
+		"no_files": {
+			"name": "No Files Provided",
+			"description": "An upload must have at least 1 file"
+		},
+		"too_many_files": {
+			"name": "Too Many Files Provided",
+			"description": "There is a limit to how many files can be put into a single upload (see config)"
+		},
+		"duplicate_paths": {
+			"name": "Duplicate Paths Provided",
+			"description": "An upload cannot contain 2 files with the same paths (see `cause` for offending paths)"
+		},
+		"size_limit_exceeded": {
+			"name": "Combined Size Limit Exceeded",
+			"description": "There is a maximum total size per upload (see config)"
+		},
+		"upload_not_found": {
+			"name": "Upload Not Found",
+			"description": "The provided upload ID didn't match any known existing uploads"
+		},
+		"upload_already_completed": {
+			"name": "Upload Already completed",
+			"description": "\\`complete\\` was already called on this upload"
+		},
+		"s3_not_configured": {
+			"name": "S3 Not Configured",
+			"description": "The S3 bucket is not configured (missing env variables)"
+		},
+		"too_many_chunks": {
+			"name": "Possibility Of Too Many Chunks",
+			"description": "AWS S3 has a limit on the number of parts that can be uploaded in a\nmultipart upload. This limit is 10,000 parts. If the number of chunks\nrequired to upload the maximum multipart upload size exceeds this limit,\nany operation will preemptively throw this error.\n"
+		},
+		"multipart_upload_completion_fail": {
+			"name": "Multipart Upload Completion Failure",
+			"description": "The multipart upload failed to complete (see `cause` for more information)"
+		}
+	},
+	"dependencies": {}
+}
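
Each key under `errors` doubles as a code that scripts throw as a `RuntimeError`, as the `complete` script below does for `upload_not_found` and `multipart_upload_completion_fail`. A minimal sketch of raising two of the validation errors declared here (the helper and its `meta` payload are hypothetical; the `RuntimeError` call shape mirrors the one in the script below):

    import { RuntimeError } from "../module.gen.ts";

    // Hypothetical guard using the error codes declared above.
    function validateFileCount(fileCount: number, maxFiles: number) {
    	if (fileCount === 0) {
    		throw new RuntimeError("no_files", { meta: { fileCount } });
    	}
    	if (fileCount > maxFiles) {
    		throw new RuntimeError("too_many_files", { meta: { fileCount, maxFiles } });
    	}
    }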

modules/uploads/scripts/complete.ts

Lines changed: 136 additions & 0 deletions

@@ -0,0 +1,136 @@
+import { RuntimeError, ScriptContext } from "../module.gen.ts";
+import {
+	completeMultipartUpload,
+	getMultipartUploadParts,
+	keyExists,
+} from "../utils/bucket.ts";
+import { getConfig } from "../utils/config_defaults.ts";
+import { getKey, prismaToOutputWithFiles, Upload } from "../utils/types.ts";
+
+export interface Request {
+	uploadId: string;
+}
+
+export interface Response {
+	upload: Upload;
+}
+
+export async function run(
+	ctx: ScriptContext,
+	req: Request,
+): Promise<Response> {
+	const config = getConfig(ctx.userConfig);
+
+	const newUpload = await ctx.db.$transaction(async (db) => {
+		// Find the upload by ID
+		const upload = await db.upload.findFirst({
+			where: {
+				id: req.uploadId,
+			},
+			select: {
+				id: true,
+				metadata: true,
+				bucket: true,
+				contentLength: true,
+				files: true,
+				createdAt: true,
+				updatedAt: true,
+				completedAt: true,
+			},
+		});
+
+		// Error if the upload wasn't prepared
+		if (!upload) {
+			throw new RuntimeError(
+				"upload_not_found",
+				{
+					meta: { uploadId: req.uploadId },
+				},
+			);
+		}
+
+		// Error if `complete` was already called with this ID
+		if (upload.completedAt !== null) {
+			throw new RuntimeError(
+				"upload_already_completed",
+				{
+					meta: { uploadId: req.uploadId },
+				},
+			);
+		}
+
+		// Check with S3 to see if the files were uploaded
+		const fileExistencePromises = upload.files.map(
+			async (file) => {
+				// If the file was uploaded in parts, complete the multipart upload
+				if (file.multipartUploadId) {
+					try {
+						const parts = await getMultipartUploadParts(
+							config.s3,
+							getKey(upload.id, file.path),
+							file.multipartUploadId,
+						);
+						if (parts.length === 0) return false;
+
+						await completeMultipartUpload(
+							config.s3,
+							getKey(upload.id, file.path),
+							file.multipartUploadId,
+							parts,
+						);
+					} catch (e) {
+						throw new RuntimeError(
+							"multipart_upload_completion_fail",
+							{ cause: e },
+						);
+					}
+
+					return true;
+				} else {
+					// Check if the file exists
+					return await keyExists(config.s3, getKey(upload.id, file.path));
+				}
+			},
+		);
+		const fileExistence = await Promise.all(fileExistencePromises);
+		const filesAllExist = fileExistence.every(Boolean);
+		if (!filesAllExist) {
+			const missingFiles = upload.files.filter((_, i) => !fileExistence[i]);
+			throw new RuntimeError(
+				"files_not_uploaded",
+				{
+					meta: {
+						uploadId: req.uploadId,
+						missingFiles: missingFiles.map((file) => file.path),
+					},
+				},
+			);
+		}
+
+		// Update the upload to mark it as completed
+		const completedUpload = await db.upload.update({
+			where: {
+				id: req.uploadId,
+			},
+			data: {
+				completedAt: new Date(),
+			},
+			select: {
+				id: true,
+				metadata: true,
+				bucket: true,
+				contentLength: true,
+				files: true,
+				createdAt: true,
+				updatedAt: true,
+				completedAt: true,
+			},
+		});
+
+		return completedUpload;
+	});
+
+	return {
+		upload: prismaToOutputWithFiles(newUpload),
+	};
+}
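
Because the lookup, the S3 existence checks, and the `completedAt` update all run inside one `$transaction`, an upload is only marked complete when every file is verifiably in the bucket. A hedged sketch of invoking the script from another module (assuming OpenGB exposes cross-module calls as `ctx.modules.<module>.<script>`; the UUID is an example):

    // Mark an upload as finished once the client reports the transfer is done.
    const { upload } = await ctx.modules.uploads.complete({
    	uploadId: "11111111-2222-3333-4444-555555555555",
    });
    console.log(`Upload ${upload.id} completed at ${upload.completedAt}`);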
