diff --git a/apps/docs/content/guides/auth/social-login/auth-linkedin.mdx b/apps/docs/content/guides/auth/social-login/auth-linkedin.mdx index c01d491f023b2..251b54e22ba3b 100644 --- a/apps/docs/content/guides/auth/social-login/auth-linkedin.mdx +++ b/apps/docs/content/guides/auth/social-login/auth-linkedin.mdx @@ -188,4 +188,4 @@ Do reach out to support if you have any concerns around this change. - [Supabase - Get started for free](https://supabase.com) - [Supabase JS Client](https://github.com/supabase/supabase-js) -- [LinkedIn Developer Dashboard](https://api.LinkedIn.com/apps) +- [LinkedIn Developer Dashboard](https://www.linkedin.com/developers/apps) diff --git a/apps/docs/content/guides/platform/migrating-to-supabase/firebase-auth.mdx b/apps/docs/content/guides/platform/migrating-to-supabase/firebase-auth.mdx index e33c787357e1e..bc3dd362dfb81 100644 --- a/apps/docs/content/guides/platform/migrating-to-supabase/firebase-auth.mdx +++ b/apps/docs/content/guides/platform/migrating-to-supabase/firebase-auth.mdx @@ -88,4 +88,4 @@ For more advanced migrations, including the use of a middleware server component ## Enterprise -[Contact us](https://forms.supabase.com/enterprise) if you need more help migrating your project. +[Contact us](https://forms.supabase.com/firebase-migration) if you need more help migrating your project. diff --git a/apps/docs/content/guides/platform/migrating-to-supabase/firebase-storage.mdx b/apps/docs/content/guides/platform/migrating-to-supabase/firebase-storage.mdx index 3704d6dd6f7b0..345e3e6daf903 100644 --- a/apps/docs/content/guides/platform/migrating-to-supabase/firebase-storage.mdx +++ b/apps/docs/content/guides/platform/migrating-to-supabase/firebase-storage.mdx @@ -67,4 +67,4 @@ If the bucket doesn't exist, it's created as a `non-public` bucket. You must set ## Enterprise -[Contact us](https://forms.supabase.com/enterprise) if you need more help migrating your project. +[Contact us](https://forms.supabase.com/firebase-migration) if you need more help migrating your project. diff --git a/apps/docs/content/guides/platform/migrating-to-supabase/firestore-data.mdx b/apps/docs/content/guides/platform/migrating-to-supabase/firestore-data.mdx index 063eecdbad51f..3121f96901f29 100644 --- a/apps/docs/content/guides/platform/migrating-to-supabase/firestore-data.mdx +++ b/apps/docs/content/guides/platform/migrating-to-supabase/firestore-data.mdx @@ -211,4 +211,4 @@ The result is two separate JSON files: ## Enterprise -[Contact us](https://forms.supabase.com/enterprise) if you need more help migrating your project. +[Contact us](https://forms.supabase.com/firebase-migration) if you need more help migrating your project. diff --git a/apps/docs/docs/ref/swift/installing.mdx b/apps/docs/docs/ref/swift/installing.mdx index cbc57f06a7b9a..2ba0b701403ee 100644 --- a/apps/docs/docs/ref/swift/installing.mdx +++ b/apps/docs/docs/ref/swift/installing.mdx @@ -20,6 +20,10 @@ custom_edit_url: https://github.com/supabase/supabase/edit/master/web/spec/supab - `Functions` - `Storage` + If you use Xcode, follow [Apple's dependencies guide](https://developer.apple.com/documentation/swift_packages/adding_package_dependencies_to_your_app) to add supabase-swift to your project. Use https://github.com/supabase-community/supabase-swift.git for the url when Xcode asks. + + If you don't want the full Supabase environment, you can add individual packages, such as Functions, `Auth`, `Realtime`, `Storage`, or `PostgREST`. 
+ diff --git a/apps/docs/spec/supabase_swift_v1.yml b/apps/docs/spec/supabase_swift_v1.yml index e88c32c51ec61..f8d4b851d78e9 100644 --- a/apps/docs/spec/supabase_swift_v1.yml +++ b/apps/docs/spec/supabase_swift_v1.yml @@ -25,13 +25,13 @@ functions: name: Initialize Client code: | ```swift - let client = SupabaseClient(supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key") + let supabase = SupabaseClient(supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key") ``` - id: initialize-client-custom-options name: Initialize Client with custom options code: | ```swift - let client = SupabaseClient( + let supabase = SupabaseClient( supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key", options: SupabaseClientOptions( diff --git a/apps/docs/spec/supabase_swift_v2.yml b/apps/docs/spec/supabase_swift_v2.yml index 4c83fb45c7350..64c83d73331a6 100644 --- a/apps/docs/spec/supabase_swift_v2.yml +++ b/apps/docs/spec/supabase_swift_v2.yml @@ -27,7 +27,7 @@ functions: ```swift import Supabase - let client = SupabaseClient(supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key") + let supabase = SupabaseClient(supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key") ``` - id: initialize-client-custom-options name: Initialize Client with custom options @@ -35,7 +35,7 @@ functions: ```swift import Supabase - let client = SupabaseClient( + let supabase = SupabaseClient( supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key", options: SupabaseClientOptions( @@ -65,7 +65,7 @@ functions: } } - let client = SupabaseClient( + let supabase = SupabaseClient( supabaseURL: URL(string: "https://xyzcompany.supabase.co")!, supabaseKey: "public-anon-key", options: SupabaseClientOptions( diff --git a/apps/studio/components/grid/components/editor/BooleanEditor.tsx b/apps/studio/components/grid/components/editor/BooleanEditor.tsx index bb8f0d3d2d40c..39953d1c4937f 100644 --- a/apps/studio/components/grid/components/editor/BooleanEditor.tsx +++ b/apps/studio/components/grid/components/editor/BooleanEditor.tsx @@ -1,5 +1,4 @@ import type { RenderEditCellProps } from 'react-data-grid' -import { useTableEditorTableStateSnapshot } from 'state/table-editor-table' import { Select } from 'ui' interface Props extends RenderEditCellProps { @@ -13,8 +12,6 @@ export const BooleanEditor = ({ onRowChange, onClose, }: Props) => { - const snap = useTableEditorTableStateSnapshot() - const gridColumn = snap.gridColumns.find((x) => x.name == column.key) const value = row[column.key as keyof TRow] as unknown as string const onBlur = () => onClose(false) @@ -36,7 +33,7 @@ export const BooleanEditor = ({ onBlur={onBlur} onChange={onChange} defaultValue={value === null ? 
'null' : value.toString()} - style={{ width: `${gridColumn?.width || column.width}px` }} + style={{ width: `${column.width}px` }} > TRUE FALSE diff --git a/apps/studio/components/grid/components/editor/JsonEditor.tsx b/apps/studio/components/grid/components/editor/JsonEditor.tsx index d1c441c0bc50c..e99c1104c6c00 100644 --- a/apps/studio/components/grid/components/editor/JsonEditor.tsx +++ b/apps/studio/components/grid/components/editor/JsonEditor.tsx @@ -10,7 +10,6 @@ import { isTableLike } from 'data/table-editor/table-editor-types' import { useGetCellValueMutation } from 'data/table-rows/get-cell-value-mutation' import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' import { prettifyJSON, removeJSONTrailingComma, tryParseJson } from 'lib/helpers' -import { useTableEditorTableStateSnapshot } from 'state/table-editor-table' import { Popover, Tooltip, TooltipContent, TooltipTrigger } from 'ui' import { BlockKeys } from '../common/BlockKeys' import { MonacoEditor } from '../common/MonacoEditor' @@ -55,7 +54,6 @@ export const JsonEditor = ({ const { id: _id } = useParams() const id = _id ? Number(_id) : undefined const { data: project } = useSelectedProjectQuery() - const snap = useTableEditorTableStateSnapshot() const { data: selectedTable } = useTableEditorQuery({ projectRef: project?.ref, @@ -63,8 +61,6 @@ export const JsonEditor = ({ id, }) - const gridColumn = snap.gridColumns.find((x) => x.name == column.key) - const rawInitialValue = row[column.key as keyof TRow] as unknown const initialValue = rawInitialValue === null || rawInitialValue === undefined || typeof rawInitialValue === 'string' @@ -161,13 +157,13 @@ export const JsonEditor = ({ overlay={ isTruncated && !isSuccess ? (
{}} - width={`${gridColumn?.width || column.width}px`} + width={`${column.width}px`} value={value ?? ''} language="markdown" /> @@ -176,7 +172,7 @@ export const JsonEditor = ({ ) : ( extends RenderEditCellProps { isNullable?: boolean @@ -17,9 +15,6 @@ export function SelectEditor({ options, isNullable, }: SelectEditorProps) { - const snap = useTableEditorTableStateSnapshot() - const gridColumn = snap.gridColumns.find((x) => x.name == column.key) - const value = row[column.key as keyof TRow] as unknown as string function onChange(event: any) { @@ -42,7 +37,7 @@ export function SelectEditor({ size="small" defaultValue={value ?? ''} className="sb-grid-select-editor !gap-2" - style={{ width: `${gridColumn?.width || column.width}px` }} + style={{ width: `${column.width}px` }} // @ts-ignore onChange={onChange} onBlur={onBlur} diff --git a/apps/studio/components/grid/components/editor/TextEditor.tsx b/apps/studio/components/grid/components/editor/TextEditor.tsx index 15e111e13fda1..ca49402a3d55e 100644 --- a/apps/studio/components/grid/components/editor/TextEditor.tsx +++ b/apps/studio/components/grid/components/editor/TextEditor.tsx @@ -9,7 +9,6 @@ import { useTableEditorQuery } from 'data/table-editor/table-editor-query' import { isTableLike } from 'data/table-editor/table-editor-types' import { useGetCellValueMutation } from 'data/table-rows/get-cell-value-mutation' import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' -import { useTableEditorTableStateSnapshot } from 'state/table-editor-table' import { Button, Popover, Tooltip, TooltipContent, TooltipTrigger, cn } from 'ui' import ConfirmationModal from 'ui-patterns/Dialogs/ConfirmationModal' import { BlockKeys } from '../common/BlockKeys' @@ -30,7 +29,6 @@ export const TextEditor = ({ isEditable?: boolean onExpandEditor: (column: string, row: TRow) => void }) => { - const snap = useTableEditorTableStateSnapshot() const { id: _id } = useParams() const id = _id ? Number(_id) : undefined const { data: project } = useSelectedProjectQuery() @@ -41,7 +39,6 @@ export const TextEditor = ({ id, }) - const gridColumn = snap.gridColumns.find((x) => x.name == column.key) const rawValue = row[column.key as keyof TRow] as unknown const initialValue = rawValue || rawValue === '' ? String(rawValue) : null const [isPopoverOpen, setIsPopoverOpen] = useState(true) @@ -114,13 +111,13 @@ export const TextEditor = ({ overlay={ isTruncated && !isSuccess ? (
{}} - width={`${gridColumn?.width || column.width}px`} + width={`${column.width}px`} value={value ?? ''} language="markdown" /> @@ -134,7 +131,7 @@ export const TextEditor = ({ ignoreOutsideClicks={isConfirmNextModalOpen} > (bearer = b)) .catch((err) => toast.error(`Failed to get JWT for role: ${err.message}`)) } else { - token = serviceKey?.api_key ?? publishableKey?.api_key + try { + const data = await getTemporaryAPIKey({ projectRef: config.projectRef }) + token = data.api_key + } catch (error) { + token = publishableKey?.api_key + } } if (token) { onChangeConfig({ ...config, token, bearer }) @@ -76,24 +82,7 @@ export const RealtimeTokensPopover = ({ config, onChangeConfig }: RealtimeTokens triggerUpdateTokenBearer() // eslint-disable-next-line react-hooks/exhaustive-deps - }, [snap.role, anonKey, serviceKey]) + }, [snap.role, anonKey]) - return ( - - Role impersonation for the Realtime Inspector is currently unavailable temporarily due - to the new API keys. Please re-enable{' '} - legacy JWT keys if - you'd like to use role impersonation with the Realtime Inspector. - - ) : undefined - } - align="start" - variant="connected-on-both" - /> - ) + return } diff --git a/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx b/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx index 172d6a7575478..b02df4a2a4e68 100644 --- a/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx +++ b/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx @@ -39,6 +39,7 @@ import { } from 'ui' import { Admonition } from 'ui-patterns/admonition' import { FormItemLayout } from 'ui-patterns/form/FormItemLayout/FormItemLayout' +import { inverseValidBucketNameRegex, validBucketNameRegex } from './CreateBucketModal.utils' import { convertFromBytes, convertToBytes } from './StorageSettings/StorageSettings.utils' export interface CreateBucketModalProps { @@ -46,29 +47,42 @@ export interface CreateBucketModalProps { onClose: () => void } -const FormSchema = z.object({ - name: z - .string() - .trim() - .min(1, 'Please provide a name for your bucket') - .regex( - /^[a-z0-9.-]+$/, - 'The name of the bucket must only contain lowercase letters, numbers, dots, and hyphens' - ) - .refine((value) => !value.endsWith(' '), 'The name of the bucket cannot end with a whitespace') - .refine( - (value) => value !== 'public', - '"public" is a reserved name. Please choose another name' - ), - type: z.enum(['STANDARD', 'ANALYTICS']).default('STANDARD'), - public: z.boolean().default(false), - has_file_size_limit: z.boolean().default(false), - formatted_size_limit: z.coerce - .number() - .min(0, 'File size upload limit has to be at least 0') - .default(0), - allowed_mime_types: z.string().trim().default(''), -}) +const FormSchema = z + .object({ + name: z + .string() + .trim() + .min(1, 'Please provide a name for your bucket') + .max(100, 'Bucket name should be below 100 characters') + .refine( + (value) => !value.endsWith(' '), + 'The name of the bucket cannot end with a whitespace' + ) + .refine( + (value) => value !== 'public', + '"public" is a reserved name. 
Please choose another name' + ), + type: z.enum(['STANDARD', 'ANALYTICS']).default('STANDARD'), + public: z.boolean().default(false), + has_file_size_limit: z.boolean().default(false), + formatted_size_limit: z.coerce + .number() + .min(0, 'File size upload limit has to be at least 0') + .default(0), + allowed_mime_types: z.string().trim().default(''), + }) + .superRefine((data, ctx) => { + if (!validBucketNameRegex.test(data.name)) { + const [match] = data.name.match(inverseValidBucketNameRegex) ?? [] + ctx.addIssue({ + path: ['name'], + code: z.ZodIssueCode.custom, + message: !!match + ? `Bucket name cannot contain the "${match}" character` + : 'Bucket name contains an invalid special character', + }) + } + }) export type CreateBucketForm = z.infer @@ -182,10 +196,9 @@ const CreateBucketModal = ({ visible, onClose }: CreateBucketModalProps) => { name="name" render={({ field }) => ( @@ -194,12 +207,12 @@ const CreateBucketModal = ({ visible, onClose }: CreateBucketModalProps) => { )} /> -
+
( - + { - const { renameFile, renameFolder, addNewFolder } = useStorageExplorerStateSnapshot() + const { renameFile, renameFolder, addNewFolder, updateRowStatus } = + useStorageExplorerStateSnapshot() const inputRef = useRef(null) const [itemName, setItemName] = useState(item.name) @@ -28,7 +29,22 @@ const FileExplorerRowEditing = ({ item, view, columnIndex }: FileExplorerRowEdit await renameFile(item, name, columnIndex) } else if (has(item, 'id')) { const itemWithColumnIndex = { ...item, columnIndex } - renameFolder(itemWithColumnIndex, name, columnIndex) + renameFolder({ + folder: itemWithColumnIndex, + newName: name, + columnIndex, + onError: () => { + if (event.type === 'blur') { + updateRowStatus({ + name: itemWithColumnIndex.name, + status: STORAGE_ROW_STATUS.READY, + columnIndex, + }) + } else { + inputRef.current.select() + } + }, + }) } else { addNewFolder({ folderName: name, diff --git a/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx b/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx index 160e8118e4f9b..8871cbff00dc1 100644 --- a/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx +++ b/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx @@ -167,7 +167,7 @@ const StorageExplorer = ({ bucket }: StorageExplorerProps) => { return (
{selectedItems.length === 0 ? ( void + onEdit: (id: string) => void + isAfterEditedMessage: boolean + isBeingEdited: boolean + onCancelEdit: () => void }) => { return ( ) } @@ -82,6 +93,8 @@ export const AIAssistant = ({ className }: AIAssistantProps) => { const { ref, id: entityId } = useParams() const searchParams = useSearchParamsShallow() + useHotKey(() => cancelEdit(), 'Escape') + const disablePrompts = useFlag('disableAssistantPrompts') const { snippets } = useSqlEditorV2StateSnapshot() const snap = useAiAssistantStateSnapshot() @@ -105,6 +118,7 @@ export const AIAssistant = ({ className }: AIAssistantProps) => { const [value, setValue] = useState(snap.initialInput || '') const [isConfirmOptInModalOpen, setIsConfirmOptInModalOpen] = useState(false) + const [editingMessageId, setEditingMessageId] = useState(null) const { data: check, isSuccess } = useCheckOpenAIKeyQuery() const isApiKeySet = IS_PLATFORM || !!check?.hasKey @@ -235,25 +249,74 @@ export const AIAssistant = ({ className }: AIAssistantProps) => { [snap] ) + const editMessage = useCallback( + (messageId: string) => { + const messageIndex = chatMessages.findIndex((msg) => msg.id === messageId) + if (messageIndex === -1) return + + // Target message + const messageToEdit = chatMessages[messageIndex] + + // Activate editing mode + setEditingMessageId(messageId) + const textContent = + messageToEdit.parts + ?.filter((part) => part.type === 'text') + .map((part) => part.text) + .join('') ?? '' + setValue(textContent) + + if (inputRef.current) { + inputRef.current.focus() + } + }, + [chatMessages, setValue] + ) + + const cancelEdit = useCallback(() => { + setEditingMessageId(null) + setValue('') + }, [setValue]) + const renderedMessages = useMemo( () => - chatMessages.map((message) => { + chatMessages.map((message, index) => { + const isBeingEdited = editingMessageId === message.id + const isAfterEditedMessage = editingMessageId + ? chatMessages.findIndex((m) => m.id === editingMessageId) < index + : false + return ( ) }), - [chatMessages, isChatLoading, updateMessage] + [chatMessages, isChatLoading, updateMessage, editMessage, editingMessageId, cancelEdit] ) const hasMessages = chatMessages.length > 0 const isShowingOnboarding = !hasMessages && isApiKeySet const sendMessageToAssistant = (finalContent: string) => { + if (editingMessageId) { + // Handling when the user is in edit mode + const messageIndex = chatMessages.findIndex((msg) => msg.id === editingMessageId) + if (messageIndex === -1) return + + snap.deleteMessagesAfter(editingMessageId, { includeSelf: true }) + const updatedMessages = chatMessages.slice(0, messageIndex) + setMessages(updatedMessages) + setEditingMessageId(null) + } + const payload = { role: 'user', createdAt: new Date(), @@ -289,6 +352,7 @@ export const AIAssistant = ({ className }: AIAssistantProps) => { snap.clearMessages() setMessages([]) lastUserMessageRef.current = null + setEditingMessageId(null) } // Update scroll behavior for new messages @@ -432,7 +496,7 @@ export const AIAssistant = ({ className }: AIAssistantProps) => { )}
{hasMessages ? ( -
+
{renderedMessages} {error && (
@@ -528,40 +592,79 @@ export const AIAssistant = ({ className }: AIAssistantProps) => {
- {!isSticky && ( - <> - -
- +
+ +
+
+ + Editing message +
+ } + onClick={cancelEdit} + className="w-6 h-6 p-0" + title="Cancel editing" + aria-label="Cancel editing" + tooltip={{ + content: { side: 'top', text: }, + }} + /> +
+
+
+
+ )} + {!isSticky && !editingMessageId && ( + +
+ + - -
-
- + + + +
+
)}
diff --git a/apps/studio/components/ui/AIAssistantPanel/AssistantChatForm.tsx b/apps/studio/components/ui/AIAssistantPanel/AssistantChatForm.tsx index 75bca4790a079..f06425388fbcf 100644 --- a/apps/studio/components/ui/AIAssistantPanel/AssistantChatForm.tsx +++ b/apps/studio/components/ui/AIAssistantPanel/AssistantChatForm.tsx @@ -1,12 +1,12 @@ import { ArrowUp, Loader2, Square } from 'lucide-react' -import React, { ChangeEvent, memo, useRef } from 'react' +import { ChangeEvent, FormEvent, forwardRef, KeyboardEvent, memo, useRef } from 'react' import { useBreakpoint } from 'common' import { ExpandingTextArea } from 'ui' import { cn } from 'ui/src/lib/utils' import { ButtonTooltip } from '../ButtonTooltip' import { type SqlSnippet } from './AIAssistant.types' -import { SnippetRow, getSnippetContent } from './SnippetRow' +import { getSnippetContent, SnippetRow } from './SnippetRow' export interface FormProps { /* The ref for the textarea, optional. Exposed for the CommandsPopover to attach events. */ @@ -43,7 +43,7 @@ export interface FormProps { className?: string } -const AssistantChatFormComponent = React.forwardRef( +const AssistantChatFormComponent = forwardRef( ( { loading = false, @@ -66,7 +66,7 @@ const AssistantChatFormComponent = React.forwardRef( const formRef = useRef(null) const isMobile = useBreakpoint('md') - const handleSubmit = (event?: React.FormEvent) => { + const handleSubmit = (event?: FormEvent) => { if (event) event.preventDefault() if (!value || loading) return @@ -81,7 +81,7 @@ const AssistantChatFormComponent = React.forwardRef( onSubmit(finalMessage) } - const handleKeyDown = (event: React.KeyboardEvent) => { + const handleKeyDown = (event: KeyboardEvent) => { if (event.key === 'Enter' && !event.shiftKey) { event.preventDefault() handleSubmit() diff --git a/apps/studio/components/ui/AIAssistantPanel/Message.tsx b/apps/studio/components/ui/AIAssistantPanel/Message.tsx index 7838c5ceda057..3a295e2d6ce25 100644 --- a/apps/studio/components/ui/AIAssistantPanel/Message.tsx +++ b/apps/studio/components/ui/AIAssistantPanel/Message.tsx @@ -1,5 +1,5 @@ import { UIMessage as VercelMessage } from '@ai-sdk/react' -import { Loader2 } from 'lucide-react' +import { Loader2, Pencil } from 'lucide-react' import { createContext, PropsWithChildren, ReactNode, useMemo } from 'react' import ReactMarkdown from 'react-markdown' import { Components } from 'react-markdown/lib/ast-to-react' @@ -8,6 +8,7 @@ import remarkGfm from 'remark-gfm' import { ProfileImage } from 'components/ui/ProfileImage' import { useProfile } from 'lib/profile' import { cn, markdownComponents, WarningIcon } from 'ui' +import { ButtonTooltip } from '../ButtonTooltip' import { EdgeFunctionBlock } from '../EdgeFunctionBlock/EdgeFunctionBlock' import { DisplayBlockRenderer } from './DisplayBlockRenderer' import { @@ -50,6 +51,10 @@ interface MessageProps { resultId?: string results: any[] }) => void + onEdit: (id: string) => void + isAfterEditedMessage: boolean + isBeingEdited: boolean + onCancelEdit: () => void } export const Message = function Message({ @@ -60,6 +65,10 @@ export const Message = function Message({ action = null, variant = 'default', onResults, + onEdit, + isAfterEditedMessage = false, + isBeingEdited = false, + onCancelEdit, }: PropsWithChildren) { const { profile } = useProfile() const allMarkdownComponents: Partial = useMemo( @@ -95,16 +104,18 @@ export const Message = function Message({
{variant === 'warning' && } {action} -
+
{isUser && ( )} -
+
{shouldUsePartsRendering ? ( (() => { const shownLoadingTools = new Set() @@ -126,7 +137,8 @@ export const Message = function Message({ key={`${id}-part-${index}`} className={cn( 'prose prose-sm [&>div]:my-4 prose-h1:text-xl prose-h1:mt-6 prose-h3:no-underline prose-h3:text-base prose-h3:mb-4 prose-strong:font-medium prose-strong:text-foreground break-words [&>p:not(:last-child)]:!mb-2 [&>*>p:first-child]:!mt-0 [&>*>p:last-child]:!mb-0 [&>*>*>p:first-child]:!mt-0 [&>*>*>p:last-child]:!mb-0 [&>ol>li]:!pl-4', - isUser && 'text-foreground [&>p]:font-medium' + isUser && 'text-foreground [&>p]:font-medium', + isBeingEdited && 'animate-pulse' )} remarkPlugins={[remarkGfm]} components={allMarkdownComponents} @@ -145,7 +157,7 @@ export const Message = function Message({ return (
{`Calling display_query...`} @@ -178,7 +190,7 @@ export const Message = function Message({ return (
{`Calling display_edge_function...`} @@ -224,6 +236,28 @@ export const Message = function Message({ ) : ( Assistant is thinking... )} + + {/* Action button - only show for user messages on hover */} +
+ {message.role === 'user' && ( + } + onClick={isBeingEdited || isAfterEditedMessage ? onCancelEdit : () => onEdit(id)} + className="text-foreground-light hover:text-foreground p-1 rounded" + aria-label={ + isBeingEdited || isAfterEditedMessage ? 'Cancel editing' : 'Edit message' + } + tooltip={{ + content: { + side: 'bottom', + text: + isBeingEdited || isAfterEditedMessage ? 'Cancel editing' : 'Edit message', + }, + }} + /> + )} +
diff --git a/apps/studio/data/api-keys/temp-api-keys-query.ts b/apps/studio/data/api-keys/temp-api-keys-query.ts new file mode 100644 index 0000000000000..4939727c23620 --- /dev/null +++ b/apps/studio/data/api-keys/temp-api-keys-query.ts @@ -0,0 +1,25 @@ +import { handleError, post } from 'data/fetchers' + +interface getTemporaryAPIKeyVariables { + projectRef?: string +} + +// [Joshen] This one specifically shouldn't need a useQuery hook since the expiry is meant to be short lived +// Used in storage explorer and realtime inspector. +export async function getTemporaryAPIKey( + { projectRef }: getTemporaryAPIKeyVariables, + signal?: AbortSignal +) { + if (!projectRef) throw new Error('projectRef is required') + + const { data, error } = await post('/platform/projects/{ref}/api-keys/temporary', { + params: { + path: { ref: projectRef }, + query: { authorization_exp: '300', claims: JSON.stringify({ role: 'service_role' }) }, + }, + signal, + }) + + if (error) handleError(error) + return data +} diff --git a/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx b/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx index 4b432682bf858..f55a43c03badc 100644 --- a/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx +++ b/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx @@ -20,7 +20,7 @@ const PageLayout: NextPageWithLayout = () => { if (!project || !projectRef) return null return ( -
+
{isError && } {isSuccess ? ( diff --git a/apps/studio/state/ai-assistant-state.tsx b/apps/studio/state/ai-assistant-state.tsx index a9034f084d142..4d2836c070c98 100644 --- a/apps/studio/state/ai-assistant-state.tsx +++ b/apps/studio/state/ai-assistant-state.tsx @@ -304,6 +304,19 @@ export const createAiAssistantState = (): AiAssistantState => { } }, + deleteMessagesAfter: (id: string, { includeSelf = true } = {}) => { + const chat = state.activeChat + if (!chat) return + + const messageIndex = chat.messages.findIndex((msg) => msg.id === id) + if (messageIndex === -1) return + + // Delete all messages from the target message (optionally including) to the end + const startIndex = includeSelf ? messageIndex : messageIndex + 1 + chat.messages.splice(startIndex) + chat.updatedAt = new Date() + }, + saveMessage: (message: MessageType | MessageType[]) => { const chat = state.activeChat if (!chat) return @@ -420,6 +433,7 @@ export type AiAssistantState = AiAssistantData & { deleteChat: (id: string) => void renameChat: (id: string, name: string) => void clearMessages: () => void + deleteMessagesAfter: (id: string, options?: { includeSelf?: boolean }) => void saveMessage: (message: MessageType | MessageType[]) => void updateMessage: (args: { id: string; resultId?: string; results: any[] }) => void setSqlSnippets: (snippets: SqlSnippet[]) => void diff --git a/apps/studio/state/storage-explorer.tsx b/apps/studio/state/storage-explorer.tsx index 67e2799411275..37356f4555a67 100644 --- a/apps/studio/state/storage-explorer.tsx +++ b/apps/studio/state/storage-explorer.tsx @@ -8,6 +8,10 @@ import { proxy, useSnapshot } from 'valtio' import { createClient, SupabaseClient } from '@supabase/supabase-js' import { BlobReader, BlobWriter, ZipWriter } from '@zip.js/zip.js' import { LOCAL_STORAGE_KEYS } from 'common' +import { + inverseValidObjectKeyRegex, + validObjectKeyRegex, +} from 'components/interfaces/Storage/CreateBucketModal.utils' import { STORAGE_BUCKET_SORT, STORAGE_ROW_STATUS, @@ -32,7 +36,7 @@ import { } from 'components/interfaces/Storage/StorageExplorer/StorageExplorer.utils' import { convertFromBytes } from 'components/interfaces/Storage/StorageSettings/StorageSettings.utils' import { InlineLink } from 'components/ui/InlineLink' -import { getKeys, useAPIKeysQuery } from 'data/api-keys/api-keys-query' +import { getTemporaryAPIKey } from 'data/api-keys/temp-api-keys-query' import { configKeys } from 'data/config/keys' import { useProjectSettingsV2Query } from 'data/config/project-settings-v2-query' import { ProjectStorageConfigResponse } from 'data/config/project-storage-config-query' @@ -76,13 +80,11 @@ if (typeof window !== 'undefined') { function createStorageExplorerState({ projectRef, resumableUploadUrl, - serviceKey, supabaseClient, }: { projectRef: string resumableUploadUrl: string - serviceKey: string - supabaseClient?: SupabaseClient + supabaseClient?: () => Promise> }) { const localStorageKey = LOCAL_STORAGE_KEYS.STORAGE_PREFERENCE(projectRef) const { view, sortBy, sortByOrder, sortBucket } = @@ -93,7 +95,6 @@ function createStorageExplorerState({ projectRef, supabaseClient, resumableUploadUrl, - serviceKey, uploadProgresses: [] as UploadProgress[], // abortController, @@ -260,6 +261,17 @@ function createStorageExplorerState({ .join('/') }, + validateFolderName: (name: string) => { + if (!validObjectKeyRegex.test(name)) { + const [match] = name.match(inverseValidObjectKeyRegex) ?? [] + return !!match + ? 
`Folder name cannot contain the "${match}" character` + : 'Folder name contains an invalid special character' + } + + return null + }, + addNewFolderPlaceholder: (columnIndex: number) => { const isPrepend = true const folderName = 'Untitled folder' @@ -292,22 +304,22 @@ function createStorageExplorerState({ autofix, columnIndex, }) + if (formattedName === null) { onError?.() return } - if (!/^[a-zA-Z0-9_-\s]*$/.test(formattedName)) { - onError?.() - return toast.error( - 'Only alphanumeric characters, hyphens, and underscores are allowed for folder names.' - ) - } - if (formattedName.length === 0) { return state.removeTempRows(columnIndex) } + const folderNameError = state.validateFolderName(formattedName) + if (folderNameError) { + onError?.() + return toast.error(folderNameError) + } + state.updateFolderAfterEdit({ folderName: formattedName, columnIndex }) const emptyPlaceholderFile = `${formattedName}/${EMPTY_FOLDER_PLACEHOLDER_FILE_NAME}` @@ -318,7 +330,7 @@ function createStorageExplorerState({ const formattedPathToEmptyPlaceholderFile = pathToFolder.length > 0 ? `${pathToFolder}/${emptyPlaceholderFile}` : emptyPlaceholderFile - await state.supabaseClient.storage + await (await state.supabaseClient()).storage .from(state.selectedBucket.name) .upload( formattedPathToEmptyPlaceholderFile, @@ -568,7 +580,7 @@ function createStorageExplorerState({ if (data.length === 0) { const prefixToPlaceholder = `${parentFolderPrefix}/${EMPTY_FOLDER_PLACEHOLDER_FILE_NAME}` - await state.supabaseClient?.storage + await (await state.supabaseClient!())?.storage .from(state.selectedBucket.name) .upload(prefixToPlaceholder, new File([], EMPTY_FOLDER_PLACEHOLDER_FILE_NAME)) } @@ -602,7 +614,17 @@ function createStorageExplorerState({ } }, - renameFolder: async (folder: StorageItemWithColumn, newName: string, columnIndex: number) => { + renameFolder: async ({ + folder, + newName, + columnIndex, + onError, + }: { + folder: StorageItemWithColumn + newName: string + columnIndex: number + onError?: () => void + }) => { const originalName = folder.name if (originalName === newName) { return state.updateRowStatus({ @@ -612,24 +634,18 @@ function createStorageExplorerState({ }) } + const folderNameError = state.validateFolderName(newName) + if (folderNameError) { + onError?.() + return toast.error(folderNameError) + } + const toastId = toast( , { closeButton: false, position: 'top-right' } ) try { - /** - * Catch any folder names that contain slash or backslash - * - * this is because slashes are used to denote - * children/parent relationships in bucket - * - * todo: move this to a util file, as createFolder() uses same logic - */ - if (newName.includes('/') || newName.includes('\\')) { - return toast.error(`Folder name cannot contain forward or back slashes.`) - } - state.updateRowStatus({ name: originalName, status: STORAGE_ROW_STATUS.LOADING, @@ -959,20 +975,6 @@ function createStorageExplorerState({ columnIndex: number isDrop?: boolean }) => { - if (!state.serviceKey) { - toast( -

- Uploading files to Storage through the dashboard is currently unavailable with the new - API keys. Please re-enable{' '} - - legacy JWT keys - {' '} - if you'd like to upload files to Storage through the dashboard. -

- ) - return - } - const queryClient = getQueryClient() const storageConfiguration = queryClient .getQueryCache() @@ -1169,9 +1171,7 @@ function createStorageExplorerState({ endpoint: state.resumableUploadUrl, retryDelays: [0, 200, 500, 1500, 3000, 5000], headers: { - authorization: `Bearer ${state.serviceKey}`, 'x-source': 'supabase-dashboard', - ...(state.serviceKey.includes('secret') ? { apikey: state.serviceKey } : {}), }, uploadDataDuringCreation: uploadDataDuringCreation, removeFingerprintOnSuccess: true, @@ -1181,6 +1181,14 @@ function createStorageExplorerState({ ...fileOptions, }, chunkSize, + onBeforeRequest: async (req) => { + try { + const data = await getTemporaryAPIKey({ projectRef: state.projectRef }) + req.setHeader('apikey', data.api_key) + } catch (error) { + throw error + } + }, onShouldRetry(error) { const status = error.originalResponse ? error.originalResponse.getStatus() : 0 const doNotRetryStatuses = [400, 403, 404, 409, 413, 415, 429] @@ -1191,22 +1199,30 @@ function createStorageExplorerState({ numberOfFilesUploadedFail += 1 if (error instanceof tus.DetailedError) { const status = error.originalResponse?.getStatus() - if (status === 415) { - // Unsupported mime type - toast.error( - capitalize( - error?.originalResponse?.getBody() || - `Failed to upload ${file.name}: ${metadata.mimetype} is not allowed` - ), - { - description: `Allowed MIME types: ${state.selectedBucket.allowed_mime_types?.join(', ')}`, - } - ) - } else if (status === 413) { - // Payload too large - toast.error( - `Failed to upload ${file.name}: File size exceeds the bucket upload limit.` - ) + + switch (status) { + case 415: + // Unsupported mime type + toast.error( + capitalize( + error?.originalResponse?.getBody() || + `Failed to upload ${file.name}: ${metadata.mimetype} is not allowed` + ), + { + description: `Allowed MIME types: ${state.selectedBucket.allowed_mime_types?.join(', ')}`, + } + ) + break + case 413: + // Payload too large + toast.error( + `Failed to upload ${file.name}: File size exceeds the bucket upload limit.` + ) + break + case 409: + // Resource already exists + toast.error(`Failed to upload ${file.name}: File name already exists.`) + break } } else { toast.error(`Failed to upload ${file.name}: ${error.message}`) @@ -1293,10 +1309,16 @@ function createStorageExplorerState({ ) { toast.dismiss(toastId) } else if (numberOfFilesUploadedFail === numberOfFilesToUpload) { - toast.error( - `Failed to upload ${numberOfFilesToUpload} file${numberOfFilesToUpload > 1 ? 's' : ''}!`, - { id: toastId, closeButton: true, duration: SONNER_DEFAULT_DURATION } - ) + if (numberOfFilesToUpload === 1) { + // [Joshen] We'd already be showing a toast when the upload files, so this is to prevent a + // duplicate error toast if its only one file that's getting uploaded + toast.dismiss(toastId) + } else { + toast.error( + `Failed to upload ${numberOfFilesToUpload} file${numberOfFilesToUpload > 1 ? 
's' : ''}!`, + { id: toastId, closeButton: true, duration: SONNER_DEFAULT_DURATION } + ) + } } else if (numberOfFilesUploadedSuccess === numberOfFilesToUpload) { toast.success( `Successfully uploaded ${numberOfFilesToUpload} file${ @@ -1721,7 +1743,7 @@ function createStorageExplorerState({ ) } />, - { id: toastId, closeButton: false, position: 'top-right' } + { id: toastId, closeButton: false, position: 'top-right', duration: Infinity } ) }, @@ -1764,7 +1786,6 @@ type StorageExplorerState = ReturnType const DEFAULT_STATE_CONFIG = { projectRef: '', resumableUploadUrl: '', - serviceKey: '', supabaseClient: undefined, } @@ -1779,10 +1800,8 @@ export const StorageExplorerStateContextProvider = ({ children }: PropsWithChild const [state, setState] = useState(() => createStorageExplorerState(DEFAULT_STATE_CONFIG)) const stateRef = useLatest(state) - const { data: apiKeys } = useAPIKeysQuery({ projectRef: project?.ref, reveal: true }) const { data: settings } = useProjectSettingsV2Query({ projectRef: project?.ref }) - const { serviceKey } = getKeys(apiKeys) const protocol = settings?.app_config?.protocol ?? 'https' const endpoint = settings?.app_config?.endpoint const resumableUploadUrl = `${IS_PLATFORM ? 'https' : protocol}://${endpoint}/storage/v1/upload/resumable` @@ -1794,43 +1813,39 @@ export const StorageExplorerStateContextProvider = ({ children }: PropsWithChild // Although I'd be keen to re-investigate this to see if we can remove this useEffect(() => { const hasDataReady = !!project?.ref - const serviceApiKey = serviceKey?.api_key ?? 'unknown' if (!isPaused && hasDataReady) { - const clientEndpoint = `${IS_PLATFORM ? 'https' : protocol}://${endpoint}` - const supabaseClient = createClient(clientEndpoint, serviceApiKey, { - auth: { - persistSession: false, - autoRefreshToken: false, - detectSessionInUrl: false, - storage: { - getItem: (key) => { - return null - }, - setItem: (key, value) => {}, - removeItem: (key) => {}, - }, - }, - }) - setState( createStorageExplorerState({ projectRef: project?.ref ?? '', - supabaseClient, + supabaseClient: async () => { + try { + const data = await getTemporaryAPIKey({ projectRef: project.ref }) + const clientEndpoint = `${IS_PLATFORM ? 'https' : protocol}://${endpoint}` + + return createClient(clientEndpoint, data.api_key, { + auth: { + persistSession: false, + autoRefreshToken: false, + detectSessionInUrl: false, + storage: { + getItem: (key) => { + return null + }, + setItem: (key, value) => {}, + removeItem: (key) => {}, + }, + }, + }) + } catch (error) { + throw error + } + }, resumableUploadUrl, - serviceKey: serviceApiKey, }) ) } - }, [ - project?.ref, - stateRef, - serviceKey?.api_key, - isPaused, - resumableUploadUrl, - protocol, - endpoint, - ]) + }, [project?.ref, stateRef, isPaused, resumableUploadUrl, protocol, endpoint]) return ( diff --git a/apps/studio/state/table-editor-table.tsx b/apps/studio/state/table-editor-table.tsx index 197a093cebddd..6824c3da674a9 100644 --- a/apps/studio/state/table-editor-table.tsx +++ b/apps/studio/state/table-editor-table.tsx @@ -143,6 +143,18 @@ export const createTableEditorTableState = ({ editable, setEditable: (editable: boolean) => { state.editable = editable + + // When changing the editable flag, all grid columns need to be recreated for the editable flag to be propagated. + state.gridColumns = getInitialGridColumns( + getGridColumns(state.table, { + tableId: table.id, + editable, + onAddColumn: editable ? 
onAddColumn : undefined, + onExpandJSONEditor, + onExpandTextEditor, + }), + { gridColumns: state.gridColumns } + ) }, }) diff --git a/apps/www/_alternatives/supabase-vs-firebase.mdx b/apps/www/_alternatives/supabase-vs-firebase.mdx index 3bb86c83e1f62..17470aa777abe 100644 --- a/apps/www/_alternatives/supabase-vs-firebase.mdx +++ b/apps/www/_alternatives/supabase-vs-firebase.mdx @@ -1,69 +1,121 @@ --- title: Supabase vs Firebase description: Supabase is the Postgres development platform with a SQL based Database, Auth, and Cloud Functions -author: ant_wilson +author: prashant tags: - comparison -date: '2022-05-26' +date: '2025-08-20' toc_depth: 3 --- +Firebase and Supabase both promise to give teams a full‑featured backend without managing servers. Firebase, owned by Google, combines a NoSQL database with authentication, file storage and serverless functions. Supabase is an open‑source alternative [built on Postgres](/docs/guides/database/overview) that offers similar services—[auth](/docs/guides/auth), [storage](/docs/guides/storage), [real‑time](/docs/guides/realtime), and [functions](/docs/guides/functions), but with the predictability of SQL and the freedom to self‑host. Choosing between them depends on how your application models data, how you plan to scale, and your tolerance for vendor lock‑in. + +## What is Supabase? + +Supabase is an open‑source backend platform that runs on **Postgres**. Each project comes with a dedicated Postgres database, an auto‑generated REST and GraphQL API, real‑time subscriptions, and storage, all tied together with **Row‑Level Security (RLS)**. RLS policies are written in SQL and allow you to define granular access rules using the same language you use for your data. Because Supabase’s core services are built on open technologies like Postgres and GoTrue, you can run Supabase locally or host it yourself using Docker or community tools such as Kubernetes or Terraform. + ## What is Firebase? -Now owned by Google, Firebase is a collection of tools aimed at mobile and web developers. At its core is the Firestore database. +Firebase is a managed platform owned by Google. It includes Cloud Firestore (a document‑oriented NoSQL database), authentication, Cloud Storage for files and Cloud Functions for backend logic. Firestore stores data as JSON‑like documents grouped in collections. The SDKs automatically cache documents on the device so apps can read, write and listen to data while offline; changes are synchronized when the device reconnects[.](https://firebase.google.com/docs/firestore/manage-data/enable-offline#:~:text=Cloud%20Firestore%20supports%20offline%20data,to%20the%20Cloud%20Firestore%20backend) Firestore supports *collection group queries*, which let you query across collections with the same name. To handle relational data you often denormalize documents or perform multiple queries in code. -Firestore allows you to store “documents”. These are collections of key:value pairs where the value can be another sub-document. Document based storage is perfect for unstructured data, since two documents in a collection do not necessarily need to have the same structure. +## Core architecture and database -Firebase also offers other things that web developers find useful like an auth service for user management, and wrappers for other Google services such as Cloud Functions, and File Storage. 
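+As a concrete illustration of the auto‑generated API described above, the sketch below uses the supabase-js client against a hypothetical `countries` table (the project URL, key, and table name are placeholder assumptions); the comparison table that follows summarizes the architectural differences in more detail:

```ts
import { createClient } from '@supabase/supabase-js'

// Placeholder project URL and publishable/anon key.
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')

// PostgREST exposes every table through a REST endpoint, so a read
// like this needs no hand-written backend route.
const { data, error } = await supabase
  .from('countries')
  .select('id, name')
  .order('name', { ascending: true })
  .limit(10)

if (error) console.error(error)
else console.log(data)
```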
+| Feature | Firebase | Supabase | +| ------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------ | +| **Database type** | Document database (Cloud Firestore) | Relational database (Postgres) | +| **Data model** | Schemaless JSON; subcollections; no native joins | Structured tables with foreign keys and indexes | +| **Transactions** | Supports transactions and batched writes | Full ACID transactions built into Postgres; you can execute complex joins, subqueries and data transformations in SQL | +| **Offline support** | Firestore caches actively used data on the client so you can read/write while offline; changes sync when connectivity returns | Supabase clients use Postgres directly; offline access requires your own caching strategy and works with Zero, Electric, | +| **API access** | Client SDKs for web, Android, iOS and server environments; REST/gRPC endpoints | Auto‑generated REST API via PostgREST and GraphQL API via pg_graphql | +| **Self‑hosting** | Not supported; services run on Google Cloud | Fully self‑hostable via Docker or on your own cloud | -## What is Supabase? +Firestore’s schemaless design makes it easy to prototype. As data relationships grow more complex, you must handle joins in the client or denormalize your data. Supabase’s Postgres foundation gives you SQL’s expressive power, foreign keys, and indexes from day one. + +## Authentication and user management + +| Feature | Firebase | Supabase | +| ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------ | +| **Login methods** | Email/password, phone, anonymous and OAuth providers; additional SAML and OpenID Connect support requires upgrading to *Firebase Authentication with Identity Platform* | Email/password, OAuth providers (Google, GitHub, Apple, etc.), SMS and custom providers | +| **Enterprise auth** | The Identity Platform upgrade enables multi‑factor auth, SAML and OIDC | SSO, SAML and OIDC are available on Pro and Enterprise plans without changing SDKs | +| **Access control** | Security Rules use a domain‑specific language and differ for Firestore, Storage and Functions | Row‑Level Security (RLS) policies written in SQL control access across the database | +| **Offline auth** | The client SDK persists auth tokens locally and handles session renewal | Session management integrates with Postgres; tokens are validated on the server | +| **Customization** | Custom auth requires writing Cloud Functions or backend services | You can write custom policies and logic in SQL; column‑level security enables field‑level restrictions | + +Firebase Authentication is easy to set up for email/password and social logins. Enterprise features such as SAML/OIDC and blocking functions require upgrading to the Identity Platform and come with new usage limits[.](https://firebase.google.com/docs/auth#:~:text=iOS%20%20%20135%20Web,192%20Unity) Supabase Auth integrates with Postgres, so you can enforce complex policies using SQL and upgrade to SSO or enterprise auth features without changing products. 
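+To make the authentication comparison concrete, here is a minimal supabase-js sketch (the `profiles` table and the sign-in credentials are hypothetical) showing a password sign-in followed by a query that Postgres filters through RLS policies:

```ts
import { createClient } from '@supabase/supabase-js'

// Placeholder project URL and publishable/anon key.
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')

// Sign in; Supabase Auth returns a session whose access token is a JWT.
const { data: signIn, error: signInError } = await supabase.auth.signInWithPassword({
  email: 'user@example.com',
  password: 'example-password',
})
if (signInError) throw signInError

// Later queries are sent with that JWT, so an RLS policy such as
// "users can only read their own profile" is enforced inside Postgres.
const { data: profile, error } = await supabase
  .from('profiles')
  .select('*')
  .eq('id', signIn.user.id)
  .single()
```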
+ +## Serverless functions and backend logic + +| Feature | Firebase | Supabase | +| -------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | +| **Runtime** | Cloud Functions run on Google’s infrastructure. You write code in JavaScript or TypeScript and deploy through the Firebase CLI; Python support is available in the second‑generation runtime | Edge Functions run on Deno and are globally distributed at the edge for low‑latency execution | +| **Languages** | JavaScript, TypeScript and Python (2nd gen) | TypeScript (via Deno); you can also call third‑party APIs or write SQL from functions | +| **Triggers** | HTTP requests, Firestore, auth events, Storage, pub/sub and scheduled jobs | HTTP requests, scheduled jobs and other triggers; functions can call Postgres directly through the `supabase-js` client | +| **Cold start** | Functions run in a managed environment; cold starts can vary depending on region and load | Edge Functions start quickly because Deno’s runtime is lightweight and runs close to users | + +Firebase Cloud Functions integrate tightly with other Firebase products and support a wide range of triggers. Supabase Edge Functions are TypeScript functions that run on Deno at edge locations. They talk directly to your Postgres database, which simplifies tasks like handling webhooks or building custom APIs. + +## Storage and file management -Supabase is the Postgres development platform. Instead of being built around a document-based datastore, Supabase offers a relational database management system called PostgreSQL. This comes with a few advantages: +| Feature | Firebase | Supabase | +| ----------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | +| **Underlying platform** | Cloud Storage for Firebase (built on Google Cloud Storage) | S3‑compatible object store integrated with Postgres | +| **Resumable uploads** | The web SDK provides `uploadBytesResumable()`and methods to pause, resume or cancel an upload | Built‑in support for resumable uploads; each upload is recorded in a Postgres table | +| **Access control** | Security Rules use a separate rules language for Storage | Storage metadata is stored in Postgres and protected by the same RLS policies as the rest of your data | +| **Image tools** | No built‑in transformations (you can add Firebase Extensions for resizing) | Built‑in image transformations and CDN delivery | +| **Free tier** | Spark plan includes 5 GB of Cloud Storage in select regions; additional usage is billed by region and operation | Free plan allows files up to 50 MB and 1 GB total storage; Pro/Team plans allow up to 500 GB per file and include CDN | -- It’s open source, so there is zero lock in. -- You can query it with SQL, a proven and powerful query language. -- It has a long track record of being used at scale. -- It’s the database of choice for transactional workloads (think apps and websites, or other things that require near-instant responses to queries). -- It comes with decades of [useful postgres extensions and plug-ins](https://supabase.com/docs/guides/database/extensions). 
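+Before the storage summary below, here is a minimal supabase-js upload sketch (the `avatars` bucket, path, and file contents are hypothetical); the same RLS-based access control noted in the table above governs the upload:

```ts
import { createClient } from '@supabase/supabase-js'

// Placeholder project URL and publishable/anon key.
const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')

// Upload into a hypothetical "avatars" bucket; whether this succeeds is decided
// by RLS policies on the storage.objects table, not a separate rules language.
const file = new Blob(['hello world'], { type: 'text/plain' })
const { data, error } = await supabase.storage
  .from('avatars')
  .upload('public/hello.txt', file, { contentType: 'text/plain', upsert: false })

if (error) console.error(error)
else console.log('Stored at:', data.path)
```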
+Firebase Storage is reliable and integrates with the auth SDK. Pricing depends on region and includes separate charges for storage, bandwidth and operations. Supabase Storage integrates with Postgres: metadata is stored in a table, so you can apply the same RLS policies you use elsewhere[.](/docs/guides/storage/security/access-control#:~:text=Supabase%20Storage%20is%20designed%20to,RLS) Free plans include 50 MB per file, and Pro plans raise that limit to hundreds of gigabytes. -At Supabase we’ve always been huge fans of Firebase - so we started adding a few things on top of PostgreSQL in an attempt to reach feature parity, including: +## Open source vs. proprietary -- Auto-generated API - [query your data straight from the client](https://supabase.com/docs/guides/api#rest-api-overview). -- Realtime - [changes in your data will be streamed directly to your application](https://supabase.com/docs/reference/dart/subscribe). -- Auth - [a simple to integrate auth system and SQL based rules engine](https://supabase.com/auth). -- Functions - [javascript and typescript functions that deploy out globally](https://supabase.com/edge-functions). -- Storage - [hosting images, videos, and pdfs easily](https://supabase.com/storage). +| Feature | Firebase | Supabase | +| ------------------ | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Core platform** | Proprietary managed services hosted by Google; client SDKs are open source. No self-hosting option. | Fully open source; core components such as Postgres, GoTrue and PostgREST are licensed under permissive licenses, and you can run Supabase yourself | +| **Self‑hosting** | Not available | Supported via CLI and Docker; community tools exist for Kubernetes, Terraform and other platforms | +| **Governance** | Maintained by Google | Community‑driven with a public roadmap and contribution model | +| **Vendor lock‑in** | Tight coupling to Google Cloud products | Portable architecture; you can move between Supabase Cloud and self‑hosted deployments without rewriting your app | -## How are they similar? +Firebase provides a polished, fully managed experience. Supabase embraces open source: you can inspect the code, contribute to improvements and, if needed, host your own Supabase instance. -Both Firebase and Supabase are based on the idea of bringing a superior developer experience to databases. With both platforms you can spin up a new project from directly inside the browser without the need to download any extra tools or software to your machine. Both platforms come with a useful dashboard UI for debugging your data in realtime, which is especially useful for fast iterations when in development. +## Pricing and cost comparison -Both Firebase and Supabase have invested heavily in client side libraries so you can communicate with your database directly from the client. Firebase has their [Firebase Javascript SDK](https://github.com/firebase/firebase-js-sdk) and Supabase has [supabase-js an isomorphic client](https://github.com/supabase/supabase-js/) that can be used both on the client also on the server in a node-js environment. 
+| Feature | Firebase | Supabase | +| ----------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **Free tier** | Spark plan includes a limited free quota—for example, Firestore allows 50,000 document reads and 20,000 writes per day, Storage includes 5 GB in select regions and Cloud Functions are limited | Supabase’s free plan includes unlimited API requests, 50,000 monthly active users, 500 MB database storage, 1 GB file storage and 5 GB bandwidth | +| **Billing model** | Pay‑as‑you‑go; you pay per document read, write, delete and per function invocation[.](https://firebase.google.com/docs/firestore/pricing#:~:text=Document%20reads%2050%2C000%20per%20day,transfer%2010%20GiB%20per%20month) Pricing varies by region and can be hard to predict | Transparent tiered pricing: pay for database storage, file storage, and compute. There are no charges for API requests, and you can track usage from the dashboard | +| **Self‑hosting** | Not supported | Available—self‑hosting lets you control costs and comply with regulatory requirements | -## How are they different? +Firebase’s usage‑based pricing can surprise teams as their apps grow because every document read, write or listener contributes to cost[.](https://firebase.google.com/docs/firestore/pricing#:~:text=Document%20reads%2050%2C000%20per%20day,transfer%2010%20GiB%20per%20month) Supabase offers predictable tiers and does not bill per request, which can simplify budgeting[.](/pricing#:~:text=Get%20started%20with%3A) -Firebase and Supabase differ in several ways. The main one being that Firebase is a document store, whereas Supabase is based on PostgreSQL - a relational, SQL-based database management system. +## Ecosystem, extensibility and community -There are some other important differences. +| Feature | Firebase | Supabase | +| ------------------------ | --------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| **Platform integration** | Deep integration with Google Cloud services such as BigQuery, Cloud Functions and Firebase ML | Built on Postgres; you can leverage Postgres extensions, listen to database events and integrate with third‑party services | +| **Extensions** | Firebase Extensions provide pre‑built functionality (e.g., image resizing, Stripe sync) | Postgres extensions (50+ preconfigured) add features like full‑text search, vector similarity and custom data types | +| **SDKs** | Official SDKs for web, Android, iOS, Unity, C++ and more | Official JavaScript client and community clients for Go, Rust, Dart and other languages | +| **Community** | Support via GitHub issues, Stack Overflow and Firebase Summit | Active open‑source community with GitHub discussions, the SupaSquad advocacy program and third‑party integrations | +| **Self‑hosting tools** | None | Community‑maintained packages for Terraform, Kubernetes and BYO cloud | -### Open Source +Firebase benefits from Google’s ecosystem and has mature SDKs for many platforms. 
Supabase leverages the Postgres ecosystem and open‑source contributions; you can extend it with Postgres extensions or third‑party packages. -Supabase is open source. Along with the hosted cloud platform, you can also take the Supabase stack and host it inside your own cloud or run it locally on your machine. There is no vendor lock in. +## Scalability and performance -### Pricing +Firestore scales automatically and handles real‑time synchronization for simple use cases. Queries are shallow and cannot traverse relationships; collection group queries allow searching across collections with the same name[.](https://firebase.blog/posts/2019/06/understanding-collection-group-queries/#:~:text=In%20the%20past%2C%20you%20could,are%20in%20a%20single%20subcollection) Firestore caches data on the client so offline access is good. Deep querying often requires multiple reads or denormalized data, which can increase costs[.](https://firebase.google.com/docs/firestore/pricing#:~:text=Document%20reads%2050%2C000%20per%20day,transfer%2010%20GiB%20per%20month) -[Firebase charges for reads, writes and deletes](https://firebase.google.com/pricing), which can lead to some unpredictability, especially in the early stages of a project when your application is in heavy development. Supabase [charges based on the amount of data stored](https://supabase.com/pricing), with breathing room for unlimited API requests and an unlimited number of Auth users. +Supabase inherits Postgres’s mature scaling features. You can perform joins, subqueries and full‑text search in a single SQL statement. Transactions guarantee consistency, and indexes keep queries fast. Real‑time updates are delivered via Postgres logical replication, and connection pooling and read replicas can be added as your load grows. Because Supabase is built on a relational database, data remains strongly consistent, and performance tuning is well understood in the Postgres ecosystem. -### Performance +## Migrating from Firebase to Supabase -We created a benchmarking repo where you can compare the performance of both services in different scenarios. Our most recent results show that [Supabase outperforms Firebase by up to 4x](https://github.com/supabase/benchmarks/issues/8) on number of reads per second, and 3.1x on writes per second. +Many teams start with Firebase for fast prototyping and switch to Supabase when their data model becomes relational or costs become unpredictable. A typical migration involves two phases: -## How do I migrate from Firebase to Supabase? +1. **Run both services side by side.** Export your Firestore collections and import them into Supabase—initially as JSON or JSONB columns—while your app continues to read from Firebase. Then incrementally swap Firebase SDK calls for Supabase queries. +2. **Normalize your data.** Refactor JSON into proper Postgres tables, add foreign keys and indexes and write RLS policies. This is when you begin to enjoy SQL’s power and simplified code. -Since Firebase is document based, migrating into a relational database requires you to map your data structure across into a SQL schema. Luckily we’ve built a [handy conversion tool to do it for you](https://supabase.com/docs/guides/migrations/firestore-data). +Supabase provides [open‑source migration tools](/docs/guides/platform/migrating-to-supabase). 
There is a utility for [migrating Firebase Authentication to Supabase Auth](/docs/guides/platform/migrating-to-supabase/firebase-auth) by exporting users and re‑creating them in Postgres, another for [copying Firestore collections to Postgres tables](/docs/guides/platform/migrating-to-supabase/firestore-data), and a tool for [migrating files from Cloud Storage to Supabase Storage](/docs/guides/platform/migrating-to-supabase/firebase-storage). These tools are maintained on GitHub and have been used by startups to migrate tens of thousands of users with minimal downtime.
-We also have guides and tools for [migrating Firebase Auth to Supabase Auth](https://supabase.com/docs/guides/migrations/firebase-auth) for [migrating Firebase Storage files to Supabase Storage](https://supabase.com/docs/guides/migrations/firebase-storage).
+## Conclusion
-These are by far the most complete Firebase to Postgres migration tools available anywhere on the web.
+Firebase offers an excellent developer experience for prototypes and simple applications. It excels at real‑time synchronization and has a generous free tier. Supabase provides a similar developer experience with a relational core. By building on Postgres and open‑source components, Supabase gives you SQL queries, ACID transactions, RLS and the option to self‑host. For teams that anticipate complex data relationships, need fine‑grained access control, or want predictable costs and open‑source flexibility, Supabase is a compelling alternative.
-You can [try Supabase for free](https://supabase.com/dashboard). If you require Enterprise level support with your project or migration, please get in touch using our [Enterprise contact form](https://forms.supabase.com/enterprise).
+Need help migrating from Firebase to Supabase? [Contact](https://forms.supabase.com/firebase-migration) our Firebase Migration Team.
diff --git a/apps/www/_blog/2025-08-12-supabase-auth-build-vs-buy.mdx b/apps/www/_blog/2025-08-12-supabase-auth-build-vs-buy.mdx
new file mode 100644
index 0000000000000..bcc842f0e83da
--- /dev/null
+++ b/apps/www/_blog/2025-08-12-supabase-auth-build-vs-buy.mdx
@@ -0,0 +1,175 @@
+---
+title: 'Supabase Auth: Build vs. Buy'
+description: 'The reasons why (and why not) to use Supabase Auth instead of building your own.'
+categories:
+  - auth
+tags:
+  - auth
+date: '2025-08-12:10:00'
+toc_depth: 2
+author: prashant
+image: supabase-auth-build-vs-buy/supabase-auth-build-vs-buy-og.png
+thumb: supabase-auth-build-vs-buy/supabase-auth-build-vs-buy-og.png
+---
+
+Authentication appears in nearly every application but is rarely the core value proposition. Yet development teams often spend weeks or months building, testing, and maintaining auth systems. Let's explore the real costs of building authentication from scratch versus using a solution like Supabase Auth.
+
+## How Supabase Auth works under the hood
+
+Before diving into the cost analysis, it's important to understand what Supabase Auth is:
+
+1. **Postgres-native authentication**: Supabase Auth stores users directly in your Postgres database in the `auth.users` table, not in a separate service
+2. **JWT-based**: Stateless, signed tokens give you full control over authorization
+3. **Row Level Security integration**: Seamlessly connects authentication with Postgres's RLS policies
+4. **Hybrid token architecture**: Supabase Auth issues stateless JWTs for access control, while maintaining refresh tokens and session data in your Postgres database for secure, persistent login
+
+## The hidden costs of building your own auth
+
+When teams decide to build authentication, they're often thinking about the initial implementation only. But auth requires ongoing maintenance that can drain resources from your core product development. You need to consider:
+
+- The time investment required to build auth from scratch
+- The ongoing maintenance required to keep auth functioning properly
+- The security risks associated with running auth systems yourself
+- The considerable time and skills involved in adding new authentication protocols
+
+Even if you start your auth investment with open-source projects, those projects are frequently abandoned, which leaves the ongoing upkeep and maintenance to you.
+
+### Time investment
+
+Building basic authentication functionality typically requires:
+
+- **2-4 weeks** for a senior developer to implement email/password login, session management, and password reset functionality
+- **1-2 weeks** for implementing each additional auth provider (Google, GitHub, etc.)
+- **1-2 weeks** for security reviews and penetration testing
+- **1-3 weeks** for implementing MFA and other security features
+- **8+ weeks** for implementing SAML (which itself requires a significant ongoing maintenance investment)
+
+This assumes you already have expertise in security best practices, JWT handling, and session management.
+
+### Ongoing maintenance
+
+Authentication isn't a "build once and forget" component:
+
+- Security vulnerabilities require immediate attention
+- Auth providers regularly update their APIs and requirements
+- Password storage standards evolve
+- Compliance requirements change over time
+- User management features grow more complex as you scale
+- New features, such as supporting one-time access codes, become more important
+
+Diverting engineering time to maintain auth systems is a sub-optimal use of resources.
+
+### Security risks
+
+Authentication is security-critical infrastructure where mistakes can be catastrophic:
+
+- Token management vulnerabilities, such as leaked or improperly invalidated session tokens
+- Password storage failures, such as weak hashing
+- Session handling issues
+- CSRF/XSS vulnerabilities
+
+These issues often aren't apparent until a breach occurs, with potentially devastating consequences. For most businesses, the time spent hardening and re-hardening auth systems, to say nothing of the time and reputational damage involved in fixing a compromised one, is simply not worth it when weighed against other priorities.
+
+## The Supabase Auth approach
+
+Supabase takes a different approach by providing authentication that's:
+
+1. **Integrated with your database**: Supabase Auth is deeply integrated with your Postgres instance via the `auth` schema. It stores user data in the `auth.users` table, manages tokens with Postgres functions and triggers, and integrates seamlessly with Row Level Security (RLS) for fine-grained access control, all without requiring you to manage auth logic in your own code.
+2. **Open source**: No vendor lock-in, with the ability to self-host if needed.
+3. **Developer-focused**: Simple APIs with client libraries for major frameworks.
+4. **Secure**: Supabase maintains security best practices for token issuance, password hashing (using `bcrypt`), and provider updates so you don’t have to monitor every standards update. However, developers are still responsible for securely handling user data and managing secure client environments.
+5. **Extensible**: Edge Functions enable custom auth logic to implement post-signup profile creation, role assignment based on domains, and third-party webhooks for things like sending Discord invites or syncing with your CRM.
+
+### Time to market
+
+Using Supabase Auth typically means:
+
+- **30 minutes to 2 hours**: Basic implementation time for email/password auth
+- **15-30 minutes**: Adding each additional provider (Google, GitHub, etc.)
+- **1-2 days**: Implementing row-level security policies
+- Security updates handled by Supabase
+
+This represents a 90-95% reduction in time-to-production compared to building from scratch.
+
+### Cost comparison
+
+Beyond the direct engineering time, there's the opportunity cost of resources diverted from your core product:
+
+| Activity                  | Build (hours)      | Supabase Auth (hours) |
+| ------------------------- | ------------------ | --------------------- |
+| Initial implementation    | 160-320            | 4-16                  |
+| Adding social providers   | 40-80 per provider | 0.5-1 per provider    |
+| Security updates (yearly) | 40-120             | 0                     |
+| User management features  | 80-160             | 0-8                   |
+| Total first year (est.)   | 320-680            | 4-24                  |
+
+At an average engineering cost of $150/hour, that's $47,400-$98,400 saved in the first year alone. For most companies, the 320-680 hours invested in building their own auth system could be channeled towards, at minimum, one category-defining feature.
+
+### Flexibility
+
+Supabase Auth supports:
+
+- Email/password authentication
+- Magic link (passwordless) login
+- Phone auth via Twilio integration
+- OAuth providers (Google, GitHub, Azure, Apple, etc.)
+- Custom claims and user metadata
+- JWT and session-based auth
+- Row-level security integration
+- Passkey support (coming soon)
+
+All without writing the underlying authentication code yourself.
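+
+As a rough illustration of that flexibility, enabling another sign-in method is usually a single client call. The sketch below is a minimal example, assuming a supabase-js v2 client, placeholder project credentials, and that the GitHub provider and email sign-in are enabled for the project:
+
+```ts
+import { createClient } from '@supabase/supabase-js'
+
+// Placeholder values: substitute your project URL and anon key.
+const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
+
+// OAuth: redirects the browser to GitHub and back to your app.
+await supabase.auth.signInWithOAuth({
+  provider: 'github',
+  options: { redirectTo: 'https://example.com/welcome' },
+})
+
+// Magic link: emails the user a one-time sign-in link instead of a password.
+await supabase.auth.signInWithOtp({ email: 'new.user@example.com' })
+```
+
+Each additional method is mostly dashboard configuration plus a call like these, which is what keeps the per-provider estimates in the time-to-market list above so low.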
+
+## Choosing between Supabase Auth and Auth0
+
+Many teams evaluate Supabase Auth against Auth0, another popular authentication service. Here's how they compare:
+
+| Feature                  | Supabase Auth                                  | Auth0                                     |
+| ------------------------ | ---------------------------------------------- | ----------------------------------------- |
+| **Architecture**         | Postgres-native, self-hostable                 | Cloud-only SaaS, proprietary              |
+| **Pricing Model**        | Based on compute and storage                   | Per monthly active user (MAU)             |
+| **Database Integration** | Direct Postgres RLS                            | Separate system from your database        |
+| **Enterprise Features**  | Self-hosting for compliance                    | Built-in SAML, LDAP, compliance tools     |
+| **Developer Experience** | Code-first, flexible APIs, Supabase UI Library | Dashboard-driven, extensive UI components |
+| **Open Source**          | Fully open source                              | Closed source                             |
+| **Customization**        | Full source code access                        | Rules, Actions, and Hooks                 |
+
+**Choose Supabase Auth when:**
+
+- You're already using Postgres and want direct database integration
+- You need cost predictability at scale (no per-user pricing)
+- You value open source and potential self-hosting
+- You prefer a code-first, API-driven approach
+
+**Choose Auth0 when:**
+
+- You need enterprise features like SAML and LDAP out of the box
+- You have complex multi-tenant B2B requirements
+- You need extensive compliance certifications immediately
+
+Both are excellent choices, but Supabase Auth typically offers significant cost advantages at scale while providing deeper database integration.
+
+## When should you build your own auth?
+
+Despite the advantages of Supabase Auth, there are legitimate reasons to build your own:
+
+1. **Specialized compliance requirements**: If you have unique regulatory needs that off-the-shelf solutions don't address
+2. **Deep integration with legacy systems**: When you need authentication tightly coupled with existing proprietary systems
+3. **Extremely unique authentication flows**: For highly specialized authentication requirements not supported by existing providers
+
+## Making the decision
+
+When considering whether to build or buy authentication, ask yourself:
+
+1. Is authentication a core differentiator for our product?
+2. Do we have security expertise on our team?
+3. Are we prepared to maintain this critical infrastructure indefinitely?
+4. Could the engineering time be better spent on our core value proposition?
+
+For most applications, authentication is essential infrastructure but not a competitive advantage. Using Supabase Auth lets you focus on what makes your application unique while leveraging battle-tested security.
+
+## Getting started
+
+- [Read the documentation](/docs/guides/auth). Implementing Supabase Auth takes just a few lines of code.
+- Try our [quickstart guide](/docs/guides/getting-started/quickstarts/nextjs). Supabase offers framework-specific packages that handle auth state, protected routes, and server-side rendering.
+- [Contact us](/contact/sales) if you want a more detailed analysis of Supabase Auth for your business, including pricing estimates and comparisons.
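+
+To make the "few lines of code" point concrete, here is a minimal sketch of email/password auth with supabase-js v2, assuming placeholder project credentials and a hypothetical `profiles` table protected by RLS:
+
+```ts
+import { createClient } from '@supabase/supabase-js'
+
+// Placeholder values: substitute your project URL and anon key.
+const supabase = createClient('https://xyzcompany.supabase.co', 'public-anon-key')
+
+// Create an account. Depending on project settings, the user may need to
+// confirm their email before the sign-in below succeeds.
+await supabase.auth.signUp({ email: 'new.user@example.com', password: 'a-strong-password' })
+
+const { data, error } = await supabase.auth.signInWithPassword({
+  email: 'new.user@example.com',
+  password: 'a-strong-password',
+})
+if (error) throw error
+
+// The client now sends the user's JWT with every request, so RLS policies on the
+// hypothetical `profiles` table decide which rows this query can return.
+const { data: profiles } = await supabase.from('profiles').select('*')
+console.log(data.user?.id, profiles)
+```
+
+Token refresh, password hashing, and provider updates happen behind these calls, which is where the savings in the cost comparison above come from.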
diff --git a/apps/www/app/api-v2/opt-out/[ref]/route.ts b/apps/www/app/api-v2/opt-out/[ref]/route.ts new file mode 100644 index 0000000000000..9028a6934b29a --- /dev/null +++ b/apps/www/app/api-v2/opt-out/[ref]/route.ts @@ -0,0 +1,102 @@ +import { NextRequest, NextResponse } from 'next/server' +import { createClient } from '@supabase/supabase-js' + +const supabaseUrl = process.env.NEXT_PUBLIC_EMAIL_ABUSE_URL as string +const supabaseServiceKey = process.env.EMAIL_ABUSE_SERVICE_KEY as string +const hcaptchaSecret = process.env.HCAPTCHA_SECRET_KEY as string + +// Function to verify hCaptcha token +async function verifyCaptcha(token: string): Promise { + try { + const response = await fetch('https://api.hcaptcha.com/siteverify', { + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + body: new URLSearchParams({ + secret: hcaptchaSecret, + response: token, + }), + }) + + const data = await response.json() + return data.success === true + } catch (error) { + console.error('Error verifying captcha:', error) + return false + } +} + +export async function POST(req: NextRequest, props: { params: Promise<{ ref: string }> }) { + const params = await props.params + const ref = params.ref + const { reason, email, captchaToken } = await req.json() + + // Validate reason + const allowedReasons = ['phishing', 'advertisement', 'malware', 'scam', 'other'] + if (!allowedReasons.includes(reason)) { + return NextResponse.json({ error: 'Bad Request: Invalid reason provided.' }, { status: 400 }) + } + + const supabase = createClient(supabaseUrl, supabaseServiceKey) + + if (!ref) { + return NextResponse.json( + { error: 'Bad Request: Missing or invalid project reference.' }, + { status: 400 } + ) + } + + const refPattern = /^[a-zA-Z]{20}$/ + const refIsInvalid = !refPattern.test(ref) + + if (refIsInvalid) { + return NextResponse.json( + { error: 'Bad Request: Missing or invalid project reference.' }, + { status: 400 } + ) + } + + // Verify captcha token + if (!captchaToken) { + return NextResponse.json( + { error: 'Bad Request: Missing captcha verification.' }, + { status: 400 } + ) + } + + const isValidCaptcha = await verifyCaptcha(captchaToken) + if (!isValidCaptcha) { + return NextResponse.json( + { error: 'Bad Request: Invalid captcha verification.' }, + { status: 400 } + ) + } + + try { + const { error: supabaseError } = await supabase + .from('manual_reports') + .insert([{ project_ref: ref, reason, email }]) + + if (supabaseError) throw new Error(`Supabase error: ${supabaseError.message}`) + + const response = await fetch(process.env.EMAIL_REPORT_SLACK_WEBHOOK as string, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ text: `New report from: ${ref} \n\n ${reason}` }), + }) + + if (!response.ok) throw new Error('Failed to send to Slack') + + return NextResponse.json( + { message: 'Thank you! We have received your report.' }, + { status: 200 } + ) + } catch (error) { + const errorMessage = (error as Error).message + return NextResponse.json( + { error: `Failure: Could not send post to Slack. 
Error: ${errorMessage}` }, + { status: 500 } + ) + } +} diff --git a/apps/www/app/api-v2/submit-form-contact-sales/route.tsx b/apps/www/app/api-v2/submit-form-contact-sales/route.tsx new file mode 100644 index 0000000000000..4f2e1aed08eb9 --- /dev/null +++ b/apps/www/app/api-v2/submit-form-contact-sales/route.tsx @@ -0,0 +1,111 @@ +const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +} + +const personalEmailDomains = [ + '@gmail.com', + '@yahoo.com', + '@hotmail.', + '@outlook.com', + '@aol.com', + '@icloud.com', + '@live.com', + '@protonmail.com', + '@mail.com', + '@example.com', +] + +const isValidEmail = (email: string): boolean => { + const emailPattern = /^[\w-\.+]+@([\w-]+\.)+[\w-]{2,8}$/ + return emailPattern.test(email) +} + +const isCompanyEmail = (email: string): boolean => { + for (const domain of personalEmailDomains) { + if (email.includes(domain)) { + return false + } + } + + return true +} + +export async function POST(req: Request) { + const HUBSPOT_PORTAL_ID = process.env.HUBSPOT_PORTAL_ID + const HUBSPOT_FORM_GUID = process.env.HUBSPOT_ENTERPRISE_FORM_GUID + + const body = await req.json() + const { firstName, secondName, companyEmail, message } = body + + if (!firstName || !secondName || !companyEmail || !message) { + return new Response(JSON.stringify({ message: 'All fields are required' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + // Validate email + if (companyEmail && !isValidEmail(companyEmail)) { + return new Response(JSON.stringify({ message: 'Invalid email address' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + // Validate company email + if (companyEmail && !isCompanyEmail(companyEmail)) { + return new Response(JSON.stringify({ message: 'Please use a company email address' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + try { + const response = await fetch( + `https://api.hsforms.com/submissions/v3/integration/submit/${HUBSPOT_PORTAL_ID}/${HUBSPOT_FORM_GUID}`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + fields: [ + { objectTypeId: '0-1', name: 'firstname', value: firstName }, + { objectTypeId: '0-1', name: 'lastname', value: secondName }, + { objectTypeId: '0-1', name: 'email', value: companyEmail }, + { objectTypeId: '0-1', name: 'message', value: message }, + ], + context: { + pageUri: 'https://supabase.com/contact/sales', + pageName: 'Enterprise Demo Request Form', + }, + legalConsentOptions: { + consent: { + consentToProcess: true, + text: 'By submitting this form, I confirm that I have read and understood the Privacy Policy.', + }, + }, + }), + } + ) + + if (!response.ok) { + const errorData = await response.json() + return new Response(JSON.stringify({ message: errorData.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: response.status, + }) + } + + return new Response(JSON.stringify({ message: 'Submission successful' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }) + } catch (error: any) { + return new Response(JSON.stringify({ error: error.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }) + } +} diff --git a/apps/www/app/api-v2/submit-form-sos2025-newsletter/route.tsx 
b/apps/www/app/api-v2/submit-form-sos2025-newsletter/route.tsx new file mode 100644 index 0000000000000..856845ad620f7 --- /dev/null +++ b/apps/www/app/api-v2/submit-form-sos2025-newsletter/route.tsx @@ -0,0 +1,75 @@ +const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +} + +const isValidEmail = (email: string): boolean => { + const emailPattern = /^[\w-\.+]+@([\w-]+\.)+[\w-]{2,8}$/ + return emailPattern.test(email) +} + +export async function POST(req: Request) { + const HUBSPOT_PORTAL_ID = process.env.HUBSPOT_PORTAL_ID + const HUBSPOT_FORM_GUID = '721fc4aa-13eb-4c25-91be-4fe9b530bed1' + + const body = await req.json() + const { email } = body + + if (!email) { + return new Response(JSON.stringify({ message: 'All fields are required' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + // Validate email + if (email && !isValidEmail(email)) { + return new Response(JSON.stringify({ message: 'Invalid email address' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + try { + const response = await fetch( + `https://api.hsforms.com/submissions/v3/integration/submit/${HUBSPOT_PORTAL_ID}/${HUBSPOT_FORM_GUID}`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + fields: [{ objectTypeId: '0-1', name: 'email', value: email }], + context: { + pageUri: 'https://supabase.com/state-of-startups', + pageName: 'State of Startups 2025', + }, + legalConsentOptions: { + consent: { + consentToProcess: true, + text: 'By submitting this form, I confirm that I have read and understood the Privacy Policy.', + }, + }, + }), + } + ) + + if (!response.ok) { + const errorData = await response.json() + return new Response(JSON.stringify({ message: errorData.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: response.status, + }) + } + + return new Response(JSON.stringify({ message: 'Submission successful' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }) + } catch (error: any) { + return new Response(JSON.stringify({ error: error.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }) + } +} diff --git a/apps/www/app/api-v2/submit-form-talk-to-partnership/route.tsx b/apps/www/app/api-v2/submit-form-talk-to-partnership/route.tsx new file mode 100644 index 0000000000000..b41b906430061 --- /dev/null +++ b/apps/www/app/api-v2/submit-form-talk-to-partnership/route.tsx @@ -0,0 +1,110 @@ +const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +} + +const personalEmailDomains = [ + '@gmail.com', + '@yahoo.com', + '@hotmail.', + '@outlook.com', + '@aol.com', + '@icloud.com', + '@live.com', + '@protonmail.com', + '@mail.com', + '@example.com', +] + +const isValidEmail = (email: string): boolean => { + const emailPattern = /^[\w-\.+]+@([\w-]+\.)+[\w-]{2,8}$/ + return emailPattern.test(email) +} + +const isCompanyEmail = (email: string): boolean => { + for (const domain of personalEmailDomains) { + if (email.includes(domain)) { + return false + } + } + + return true +} + +export async function POST(req: Request) { + const HUBSPOT_PORTAL_ID = process.env.HUBSPOT_PORTAL_ID + const HUBSPOT_FORM_GUID = process.env.HUBSPOT_PARTNERSHIP_FORM_GUID + + const body = await 
req.json() + const { firstName, secondName, companyEmail } = body + + if (!firstName || !secondName || !companyEmail) { + return new Response(JSON.stringify({ message: 'All fields are required' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + // Validate email + if (companyEmail && !isValidEmail(companyEmail)) { + return new Response(JSON.stringify({ message: 'Invalid email address' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + // Validate company email + if (companyEmail && !isCompanyEmail(companyEmail)) { + return new Response(JSON.stringify({ message: 'Please use a company email address' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 422, + }) + } + + try { + const response = await fetch( + `https://api.hsforms.com/submissions/v3/integration/submit/${HUBSPOT_PORTAL_ID}/${HUBSPOT_FORM_GUID}`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + fields: [ + { objectTypeId: '0-1', name: 'firstname', value: firstName }, + { objectTypeId: '0-1', name: 'lastname', value: secondName }, + { objectTypeId: '0-1', name: 'email', value: companyEmail }, + ], + context: { + pageUri: 'https://supabase.com/solutions/ai-builders', + pageName: 'Solutions / AI Builders', + }, + legalConsentOptions: { + consent: { + consentToProcess: true, + text: 'By submitting this form, I confirm that I have read and understood the Privacy Policy.', + }, + }, + }), + } + ) + + if (!response.ok) { + const errorData = await response.json() + return new Response(JSON.stringify({ message: errorData.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: response.status, + }) + } + + return new Response(JSON.stringify({ message: 'Submission successful' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }) + } catch (error: any) { + return new Response(JSON.stringify({ error: error.message }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }) + } +} diff --git a/apps/www/public/images/blog/supabase-auth-build-vs-buy/supabase-auth-build-vs-buy-og.png b/apps/www/public/images/blog/supabase-auth-build-vs-buy/supabase-auth-build-vs-buy-og.png new file mode 100644 index 0000000000000..21ff7ad72bd3c Binary files /dev/null and b/apps/www/public/images/blog/supabase-auth-build-vs-buy/supabase-auth-build-vs-buy-og.png differ diff --git a/apps/www/public/images/logos/publicity/fixed-width/soshi.svg b/apps/www/public/images/logos/publicity/fixed-width/soshi.svg new file mode 100644 index 0000000000000..b9d17d702642c --- /dev/null +++ b/apps/www/public/images/logos/publicity/fixed-width/soshi.svg @@ -0,0 +1,4 @@ + + + + diff --git a/apps/www/public/images/logos/publicity/soshi.svg b/apps/www/public/images/logos/publicity/soshi.svg new file mode 100644 index 0000000000000..274049317cc0e --- /dev/null +++ b/apps/www/public/images/logos/publicity/soshi.svg @@ -0,0 +1,4 @@ + + + + diff --git a/apps/www/public/rss.xml b/apps/www/public/rss.xml index 8ec0f09ffee83..e9c5ad184c3d0 100644 --- a/apps/www/public/rss.xml +++ b/apps/www/public/rss.xml @@ -5,9 +5,16 @@ https://supabase.com Latest news from Supabase en - Fri, 18 Jul 2025 00:00:00 -0700 + Tue, 12 Aug 2025 00:00:00 -0700 + https://supabase.com/blog/supabase-auth-build-vs-buy + Supabase Auth: Build vs. 
Buy + https://supabase.com/blog/supabase-auth-build-vs-buy + The reasons why (and why not) to use Supabase Auth instead of building your own. + Tue, 12 Aug 2025 00:00:00 -0700 + + https://supabase.com/blog/launch-week-15-top-10 Top 10 Launches of Launch Week 15 https://supabase.com/blog/launch-week-15-top-10
diff --git a/packages/ui/src/components/Button/Button.tsx b/packages/ui/src/components/Button/Button.tsx
index 20513b69d4867..df71885573960 100644
--- a/packages/ui/src/components/Button/Button.tsx
+++ b/packages/ui/src/components/Button/Button.tsx
@@ -6,7 +6,6 @@ import { Loader2 } from 'lucide-react'
 import { cloneElement, forwardRef, isValidElement } from 'react'
 import { SIZE_VARIANTS, SIZE_VARIANTS_DEFAULT } from '../../lib/constants'
 import { cn } from '../../lib/utils/cn'
-import { IconContext } from '../Icon/IconContext'
 export type ButtonVariantProps = VariantProps<typeof buttonVariants>
 const buttonVariants = cva(
@@ -255,6 +254,11 @@ const Button = forwardRef(
         {...props}
         disabled={disabled}
         className={cn(buttonVariants({ type, size, disabled, block, rounded }), className)}
+        onClick={(e) => {
+          // [Joshen] Prevents redirecting if Button is used with a link-based child element
+          if (disabled) return e.preventDefault()
+          else props?.onClick?.(e)
+        }}
       >
         {asChild ? (
           isValidElement(children) ? (