Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 22 additions & 1 deletion .github/workflows/docker-build-web.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,17 @@ jobs:
echo "NEXT_PUBLIC_DOCKER_BUILD=true" >> .env
echo "NEXT_PUBLIC_CAP_AWS_BUCKET=capso" >> .env
echo "NEXT_PUBLIC_CAP_AWS_REGION=us-east-1" >> .env
cat .env
# Add required build-time environment variables
echo "DATABASE_URL=mysql://build:build@build:3306/build" >> .env
echo "CAP_AWS_BUCKET=build-bucket" >> .env
echo "CAP_AWS_REGION=us-east-1" >> .env
echo "CAP_AWS_ACCESS_KEY=build-access-key" >> .env
echo "CAP_AWS_SECRET_KEY=build-secret-key" >> .env
echo "NEXTAUTH_SECRET=build-nextauth-secret-placeholder-32-chars" >> .env
echo "NEXTAUTH_URL=https://build.placeholder.com" >> .env
echo "CAP_AWS_ENDPOINT=https://build.placeholder.com" >> .env
echo "S3_PUBLIC_ENDPOINT=https://build.placeholder.com" >> .env
echo "S3_INTERNAL_ENDPOINT=https://build.placeholder.com" >> .env
Comment on lines +41 to +51
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Don’t write secrets to .env and avoid baking them into images; use GitHub Secrets (and BuildKit secrets) instead.

  • Avoid committing/echoing credentials (DB URL, AWS keys, NEXTAUTH_SECRET) to a file and passing them as ARGs (they can persist in image layers).
  • Pass non-secret build-time values as build-args; provide secrets via ${{ secrets.* }} (masked) and preferably BuildKit secrets: with matching Dockerfile mounts.

Minimal safer change (no Dockerfile edits): move secrets from .env to masked build-args and stop writing them to .env:

       - name: Create .env file
         run: |
           echo "WEB_URL=http://localhost:3000" > .env
           echo "NEXT_PUBLIC_DOCKER_BUILD=true" >> .env
           echo "NEXT_PUBLIC_CAP_AWS_BUCKET=capso" >> .env
           echo "NEXT_PUBLIC_CAP_AWS_REGION=us-east-1" >> .env
-          # Add required build-time environment variables
-          echo "DATABASE_URL=mysql://build:build@build:3306/build" >> .env
-          echo "CAP_AWS_BUCKET=build-bucket" >> .env
-          echo "CAP_AWS_REGION=us-east-1" >> .env
-          echo "CAP_AWS_ACCESS_KEY=build-access-key" >> .env
-          echo "CAP_AWS_SECRET_KEY=build-secret-key" >> .env
-          echo "NEXTAUTH_SECRET=build-nextauth-secret-placeholder-32-chars" >> .env
-          echo "NEXTAUTH_URL=https://build.placeholder.com" >> .env
-          echo "CAP_AWS_ENDPOINT=https://build.placeholder.com" >> .env
-          echo "S3_PUBLIC_ENDPOINT=https://build.placeholder.com" >> .env
-          echo "S3_INTERNAL_ENDPOINT=https://build.placeholder.com" >> .env
+          # Keep secrets out of .env in CI—provide via masked build-args below

And mask build-args with repo/environment secrets:

           build-args: |
-            DATABASE_URL=mysql://build:build@build:3306/build
-            CAP_AWS_BUCKET=build-bucket
-            CAP_AWS_REGION=us-east-1
-            CAP_AWS_ACCESS_KEY=build-access-key
-            CAP_AWS_SECRET_KEY=build-secret-key
-            NEXTAUTH_SECRET=build-nextauth-secret-placeholder-32-chars
-            NEXTAUTH_URL=https://build.placeholder.com
-            CAP_AWS_ENDPOINT=https://build.placeholder.com
-            S3_PUBLIC_ENDPOINT=https://build.placeholder.com
-            S3_INTERNAL_ENDPOINT=https://build.placeholder.com
+            DATABASE_URL=${{ secrets.BUILD_DATABASE_URL }}
+            CAP_AWS_BUCKET=${{ secrets.CAP_AWS_BUCKET }}
+            CAP_AWS_REGION=${{ secrets.CAP_AWS_REGION }}
+            CAP_AWS_ACCESS_KEY=${{ secrets.CAP_AWS_ACCESS_KEY }}
+            CAP_AWS_SECRET_KEY=${{ secrets.CAP_AWS_SECRET_KEY }}
+            NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }}
+            NEXTAUTH_URL=${{ secrets.NEXTAUTH_URL }}
+            CAP_AWS_ENDPOINT=${{ secrets.CAP_AWS_ENDPOINT }}
+            S3_PUBLIC_ENDPOINT=${{ secrets.S3_PUBLIC_ENDPOINT }}
+            S3_INTERNAL_ENDPOINT=${{ secrets.S3_INTERNAL_ENDPOINT }}

If you can adjust the Dockerfile, prefer BuildKit secrets:

-          build-args: |
+          build-args: |
             CAP_AWS_BUCKET=${{ secrets.CAP_AWS_BUCKET }}
             CAP_AWS_REGION=${{ secrets.CAP_AWS_REGION }}
             CAP_AWS_ENDPOINT=${{ secrets.CAP_AWS_ENDPOINT }}
             S3_PUBLIC_ENDPOINT=${{ secrets.S3_PUBLIC_ENDPOINT }}
             S3_INTERNAL_ENDPOINT=${{ secrets.S3_INTERNAL_ENDPOINT }}
+          secrets: |
+            DATABASE_URL=${{ secrets.BUILD_DATABASE_URL }}
+            CAP_AWS_ACCESS_KEY=${{ secrets.CAP_AWS_ACCESS_KEY }}
+            CAP_AWS_SECRET_KEY=${{ secrets.CAP_AWS_SECRET_KEY }}
+            NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }}
+            NEXTAUTH_URL=${{ secrets.NEXTAUTH_URL }}

Note: Checkov flagged basic auth in lines 42–43; changes above address it.
I can send a Dockerfile patch using RUN --mount=type=secret,id=... if you want.

Also applies to: 72-83

🧰 Tools
🪛 Checkov (3.2.334)

[MEDIUM] 42-43: Basic Auth Credentials

(CKV_SECRET_4)


- name: Login to GitHub Container Registry
uses: docker/login-action@v3
Expand All @@ -58,6 +68,17 @@ jobs:
outputs: type=image,name=ghcr.io/${{ github.repository_owner }}/cap-web,push-by-digest=true
cache-from: type=gha,scope=buildx-${{ matrix.platform }}
cache-to: type=gha,mode=max,scope=buildx-${{ matrix.platform }}
build-args: |
DATABASE_URL=mysql://build:build@build:3306/build
CAP_AWS_BUCKET=build-bucket
CAP_AWS_REGION=us-east-1
CAP_AWS_ACCESS_KEY=build-access-key
CAP_AWS_SECRET_KEY=build-secret-key
NEXTAUTH_SECRET=build-nextauth-secret-placeholder-32-chars
NEXTAUTH_URL=https://build.placeholder.com
CAP_AWS_ENDPOINT=https://build.placeholder.com
S3_PUBLIC_ENDPOINT=https://build.placeholder.com
S3_INTERNAL_ENDPOINT=https://build.placeholder.com

- name: Export Digest
run: |
Expand Down
8 changes: 1 addition & 7 deletions apps/web/app/api/screenshot/route.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import { db } from "@cap/database";
import { getCurrentUser } from "@cap/database/auth/session";
import { s3Buckets, videos } from "@cap/database/schema";
import { serverEnv } from "@cap/env";
import { S3_BUCKET_URL } from "@cap/utils";
import { eq } from "drizzle-orm";
import type { NextRequest } from "next/server";
import { getHeaders } from "@/utils/helpers";
Expand Down Expand Up @@ -90,11 +88,7 @@ export async function GET(request: NextRequest) {

let screenshotUrl: string;

if (video.awsBucket !== serverEnv().CAP_AWS_BUCKET) {
screenshotUrl = await bucketProvider.getSignedObjectUrl(screenshot.Key!);
} else {
screenshotUrl = `${S3_BUCKET_URL}/${screenshot.Key}`;
}
screenshotUrl = await bucketProvider.getSignedObjectUrl(screenshot.Key!);

return new Response(JSON.stringify({ url: screenshotUrl }), {
status: 200,
Expand Down
26 changes: 6 additions & 20 deletions apps/web/app/api/thumbnail/route.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { db } from "@cap/database";
import { s3Buckets, videos } from "@cap/database/schema";
import { serverEnv } from "@cap/env";
import { S3_BUCKET_URL } from "@cap/utils";
import { eq } from "drizzle-orm";
import type { NextRequest } from "next/server";
import { getHeaders } from "@/utils/helpers";
Expand Down Expand Up @@ -58,19 +56,7 @@ export async function GET(request: NextRequest) {
);
}

const { video } = result;
const prefix = `${userId}/${videoId}/`;

let thumbnailUrl: string;

if (!result.bucket || video.awsBucket === serverEnv().CAP_AWS_BUCKET) {
thumbnailUrl = `${S3_BUCKET_URL}/${prefix}screenshot/screen-capture.jpg`;
return new Response(JSON.stringify({ screen: thumbnailUrl }), {
status: 200,
headers: getHeaders(origin),
});
}

const bucketProvider = await createBucketProvider(result.bucket);

try {
Expand All @@ -96,7 +82,12 @@ export async function GET(request: NextRequest) {
);
}

thumbnailUrl = await bucketProvider.getSignedObjectUrl(thumbnailKey);
const thumbnailUrl = await bucketProvider.getSignedObjectUrl(thumbnailKey);

return new Response(JSON.stringify({ screen: thumbnailUrl }), {
status: 200,
headers: getHeaders(origin),
});
} catch (error) {
return new Response(
JSON.stringify({
Expand All @@ -110,9 +101,4 @@ export async function GET(request: NextRequest) {
},
);
}

return new Response(JSON.stringify({ screen: thumbnailUrl }), {
status: 200,
headers: getHeaders(origin),
});
}
33 changes: 27 additions & 6 deletions apps/web/app/api/video/playlistUrl/route.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import { db } from "@cap/database";
import { videos } from "@cap/database/schema";
import { getCurrentUser } from "@cap/database/auth/session";
import { s3Buckets, videos } from "@cap/database/schema";
import { serverEnv } from "@cap/env";
import { S3_BUCKET_URL } from "@cap/utils";
import { eq } from "drizzle-orm";
import type { NextRequest } from "next/server";
import { CACHE_CONTROL_HEADERS, getHeaders } from "@/utils/helpers";
import { createBucketProvider } from "@/utils/s3";

export const revalidate = 0;

Expand Down Expand Up @@ -36,7 +37,11 @@ export async function GET(request: NextRequest) {
);
}

const query = await db().select().from(videos).where(eq(videos.id, videoId));
const query = await db()
.select({ video: videos, bucket: s3Buckets })
.from(videos)
.leftJoin(s3Buckets, eq(videos.bucket, s3Buckets.id))
.where(eq(videos.id, videoId));

if (query.length === 0) {
return new Response(
Expand All @@ -48,8 +53,8 @@ export async function GET(request: NextRequest) {
);
}

const video = query[0];
if (!video) {
const result = query[0];
if (!result?.video) {
return new Response(
JSON.stringify({ error: true, message: "Video not found" }),
{
Expand All @@ -59,8 +64,24 @@ export async function GET(request: NextRequest) {
);
}

const { video, bucket } = result;

if (video.jobStatus === "COMPLETE") {
const playlistUrl = `${S3_BUCKET_URL}/${video.ownerId}/${video.id}/output/video_recording_000_output.m3u8`;
// Enforce access control for non-public videos
if (video.public === false) {
const user = await getCurrentUser();
if (!user || user.id !== video.ownerId) {
return new Response(
JSON.stringify({ error: true, message: "Video is not public" }),
{ status: 401, headers: getHeaders(origin) },
);
}
}

const bucketProvider = await createBucketProvider(bucket);
const playlistKey = `${video.ownerId}/${video.id}/output/video_recording_000_output.m3u8`;
const playlistUrl = await bucketProvider.getSignedObjectUrl(playlistKey);

return new Response(
JSON.stringify({ playlistOne: playlistUrl, playlistTwo: null }),
{
Expand Down
7 changes: 2 additions & 5 deletions apps/web/app/embed/[videoId]/_components/EmbedVideo.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ import {
parseVTT,
type TranscriptEntry,
} from "@/app/s/[videoId]/_components/utils/transcript-utils";
import { usePublicEnv } from "@/utils/public-env";

declare global {
interface Window {
Expand Down Expand Up @@ -147,8 +146,6 @@ export const EmbedVideo = forwardRef<
}
}, [chapters]);

const publicEnv = usePublicEnv();

let videoSrc: string;
let enableCrossOrigin = false;

Expand All @@ -163,9 +160,9 @@ export const EmbedVideo = forwardRef<
) {
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=master`;
} else if (data.source.type === "MediaConvert") {
videoSrc = `${publicEnv.s3BucketUrl}/${data.ownerId}/${data.id}/output/video_recording_000.m3u8`;
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=video`;
} else {
videoSrc = `${publicEnv.s3BucketUrl}/${data.ownerId}/${data.id}/combined-source/stream.m3u8`;
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=video`;
}

useEffect(() => {
Expand Down
7 changes: 2 additions & 5 deletions apps/web/app/s/[videoId]/_components/ShareVideo.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import {
useState,
} from "react";
import { UpgradeModal } from "@/components/UpgradeModal";
import { usePublicEnv } from "@/utils/public-env";
import { CapVideoPlayer } from "./CapVideoPlayer";
import { HLSVideoPlayer } from "./HLSVideoPlayer";
import {
Expand Down Expand Up @@ -123,8 +122,6 @@ export const ShareVideo = forwardRef<
}
}, [chapters]);

const publicEnv = usePublicEnv();

let videoSrc: string;
let enableCrossOrigin = false;

Expand All @@ -139,9 +136,9 @@ export const ShareVideo = forwardRef<
) {
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=master`;
} else if (data.source.type === "MediaConvert") {
videoSrc = `${publicEnv.s3BucketUrl}/${data.ownerId}/${data.id}/output/video_recording_000.m3u8`;
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=video`;
} else {
videoSrc = `${publicEnv.s3BucketUrl}/${data.ownerId}/${data.id}/combined-source/stream.m3u8`;
videoSrc = `/api/playlist?userId=${data.ownerId}&videoId=${data.id}&videoType=video`;
}

return (
Expand Down
30 changes: 16 additions & 14 deletions apps/web/utils/s3.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ import {
type ListObjectsV2Output,
type ObjectIdentifier,
PutObjectCommand,
PutObjectCommandInput,
type PutObjectCommandOutput,
type PutObjectRequest,
S3Client,
Expand Down Expand Up @@ -262,33 +261,36 @@ function createS3Provider(
),
);
},
headObject: (key) =>
getClient(true).then((client) =>
client.send(new HeadObjectCommand({ Bucket: bucket, Key: key })),
),
putObject: (key, body, fields) =>
getClient(true).then((client) =>
client.send(
async headObject(key: string) {
return await getClient(true).then((c) =>
c.send(new HeadObjectCommand({ Bucket: bucket, Key: key })),
);
},
async putObject(key: string, body, fields) {
return await getClient(true).then((c) =>
c.send(
new PutObjectCommand({
Bucket: bucket,
Key: key,
Body: body,
ContentType: fields?.contentType,
}),
),
),
copyObject: (source, key, args) =>
getClient(true).then((client) =>
client.send(
);
},
async copyObject(source: string, key: string, args) {
return await getClient(true).then((c) =>
c.send(
new CopyObjectCommand({
Bucket: bucket,
CopySource: source,
Key: key,
...args,
}),
),
),
deleteObject: (key) =>
);
},
deleteObject: (key: string) =>
getClient(true).then((client) =>
client.send(new DeleteObjectCommand({ Bucket: bucket, Key: key })),
),
Expand Down
32 changes: 32 additions & 0 deletions packages/database/auth/auth-options.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import { dub } from "../dub";
import { sendEmail } from "../emails/config";
import { nanoId } from "../helpers";
import { organizationMembers, organizations, users } from "../schema";
import { isEmailAllowedForSignup } from "./domain-utils";
import { DrizzleAdapter } from "./drizzle-adapter";

export const config = {
Expand Down Expand Up @@ -189,6 +190,37 @@ export const authOptions = (): NextAuthOptions => {
},
},
callbacks: {
// Gate sign-in on an allow-list of email domains read from
// CAP_ALLOWED_SIGNUP_DOMAINS. When the env var is unset, every sign-in is
// permitted. Existing users are always allowed in; only NEW accounts are
// subject to the domain restriction.
async signIn({ user, email, credentials }) {
const allowedDomains = serverEnv().CAP_ALLOWED_SIGNUP_DOMAINS;
// No allow-list configured — open signup.
if (!allowedDomains) return true;

// Get email from either user object (OAuth) or email parameter (email provider)
// NOTE(review): in NextAuth the `email` callback argument is typically an
// object ({ verificationRequest?: boolean }) rather than a string, so the
// `typeof email === "string"` branch may never fire — confirm against the
// NextAuth version in use. `credentials.email` covers the credentials flow.
const userEmail =
user?.email ||
(typeof email === "string"
? email
: typeof credentials?.email === "string"
? credentials.email
: null);
// Fail open: if no email can be determined, allow the sign-in to proceed
// rather than blocking a flow we cannot classify.
if (!userEmail || typeof userEmail !== "string") return true;

// Look up the account to distinguish new signups from returning users.
const [existingUser] = await db()
.select()
.from(users)
.where(eq(users.email, userEmail))
.limit(1);

// Only apply domain restrictions for new users, existing ones can always sign in
if (
!existingUser &&
!isEmailAllowedForSignup(userEmail, allowedDomains)
) {
// NOTE(review): the message says "domain" but logs the full address —
// consider logging only the domain part to keep PII out of logs.
console.warn(`Signup blocked for email domain: ${userEmail}`);
return false;
}

return true;
},
async session({ token, session }) {
if (!session.user) return session;

Expand Down
Loading