diff --git a/apps/docs/app/api/graphql/tests/searchDocs.test.ts b/apps/docs/app/api/graphql/tests/searchDocs.test.ts
index 2f1f9524a9257..92b9012a862d2 100644
--- a/apps/docs/app/api/graphql/tests/searchDocs.test.ts
+++ b/apps/docs/app/api/graphql/tests/searchDocs.test.ts
@@ -4,7 +4,9 @@ import { type OpenAIClientInterface } from '~/lib/openAi'
 import { ApiError } from '../../utils'
 import { POST } from '../route'
 
-const contentEmbeddingMock = vi.fn().mockImplementation(async () => Result.ok([0.1, 0.2, 0.3]))
+const contentEmbeddingMock = vi
+  .fn()
+  .mockImplementation(async () => Result.ok({ embedding: [0.1, 0.2, 0.3], token_count: 10 }))
 const openAIMock: OpenAIClientInterface = {
   createContentEmbedding: contentEmbeddingMock,
 }
diff --git a/apps/docs/app/api/utils.ts b/apps/docs/app/api/utils.ts
index 652ea444e598e..ffdfc4f4985ac 100644
--- a/apps/docs/app/api/utils.ts
+++ b/apps/docs/app/api/utils.ts
@@ -64,6 +64,16 @@ export class NoDataError
extends ApiError } } +export class FileNotFoundError
extends Error {
+  constructor(
+    message: string,
+    error: Error,
+    public details?: Details
+  ) {
+    super(`FileNotFound: ${message}`, { cause: error })
+  }
+}
+
 export class MultiError extends Error {
   constructor(
     message: string,
@@ -79,7 +89,7 @@ export class MultiError).push(error)
+    ;((this.cause ??= []) as Array).push(error)
     return this
   }
 }
diff --git a/apps/docs/content/guides/realtime/broadcast.mdx b/apps/docs/content/guides/realtime/broadcast.mdx
index 38eb434e25119..fb544f58e5f35 100644
--- a/apps/docs/content/guides/realtime/broadcast.mdx
+++ b/apps/docs/content/guides/realtime/broadcast.mdx
@@ -691,3 +691,204 @@ You can pass configuration options while initializing the Supabase Client.
 Use this to guarantee that the server has received the message before resolving `channelD.send`'s promise. If the `ack` config is not set to `true` when creating the channel, the promise returned by `channelD.send` will resolve immediately.
+
+### Send messages using REST calls
+
+You can also send a Broadcast message by making an HTTP request to Realtime servers. This is useful when you want to send messages from your server or client without having to first establish a WebSocket connection.
+
+
+
+
+
+  This is currently available only in the Supabase JavaScript client version 2.37.0 and later.
+
+
+
+  ```js
+  const channel = supabase.channel('test-channel')
+
+  // No need to subscribe to channel
+
+  channel
+    .send({
+      type: 'broadcast',
+      event: 'test',
+      payload: { message: 'Hi' },
+    })
+    .then((resp) => console.log(resp))
+
+  // Remember to clean up the channel
+
+  supabase.removeChannel(channel)
+  ```
+
+
+
+  ```dart
+  // No need to subscribe to channel
+
+  final channel = supabase.channel('test-channel');
+  final res = await channel.sendBroadcastMessage(
+    event: "test",
+    payload: {
+      'message': 'Hi',
+    },
+  );
+  print(res);
+  ```
+
+
+
+  ```swift
+  let myChannel = await supabase.channel("room-2") {
+    $0.broadcast.acknowledgeBroadcasts = true
+  }
+
+  // No need to subscribe to channel
+
+  await myChannel.broadcast(event: "test", message: ["message": "HI"])
+  ```
+
+
+
+  ```kotlin
+  val myChannel = supabase.channel("room-2") {
+    broadcast {
+      acknowledgeBroadcasts = true
+    }
+  }
+
+  // No need to subscribe to channel
+
+  myChannel.broadcast(event = "test", buildJsonObject {
+    put("message", "Hi")
+  })
+  ```
+
+
+
+  Not yet supported in Python.
+
+
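+Each of these clients wraps a single HTTP call to the Realtime server. The sketch below is illustrative only: the endpoint path, header names, and payload batching are assumptions to verify against the Realtime API reference, and `SUPABASE_ANON_KEY` is a placeholder for your project's anon key.
+
+```js
+// Hypothetical sketch of the underlying REST call; verify the endpoint and payload shape
+const SUPABASE_ANON_KEY = 'your-anon-key'
+
+const response = await fetch('https://your-project-ref.supabase.co/realtime/v1/api/broadcast', {
+  method: 'POST',
+  headers: {
+    apikey: SUPABASE_ANON_KEY,
+    'Content-Type': 'application/json',
+  },
+  body: JSON.stringify({
+    messages: [{ topic: 'test-channel', event: 'test', payload: { message: 'Hi' } }],
+  }),
+})
+console.log(response.status) // a 2xx status indicates the server accepted the message
+```
+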
+## Trigger broadcast messages from your database
+
+
+
+This feature is currently in Public Alpha. If you have any issues [submit a support ticket](https://supabase.help).
+
+
+
+### How it works
+
+Broadcast Changes allows you to trigger messages from your database. To achieve this, Realtime reads directly from your WAL (Write-Ahead Log) using a publication against the `realtime.messages` table, so whenever a new insert happens a message is sent to connected users.
+
+Messages are stored in daily partitioned tables, which allows previous messages to be deleted performantly by dropping the physical partitions. Partitions older than 3 days are deleted.
+
+Broadcasting from the database works like a client-side broadcast, using WebSockets to send JSON payloads. [Realtime Authorization](/docs/guides/realtime/authorization) is required and enabled by default to protect your data.
+
+The database broadcast feature provides two functions to help you send messages:
+
+- `realtime.send` inserts a message into `realtime.messages` without a specific format.
+- `realtime.broadcast_changes` inserts a message with the fields required to emit database changes to clients. This helps you set up triggers on your tables to emit changes.
+
+### Broadcasting a message from your database
+
+The `realtime.send` function provides the most flexibility by allowing you to broadcast messages from your database without a specific format. This allows you to use database broadcast for messages that aren't necessarily tied to the shape of a Postgres row change.
+
+```sql
+SELECT realtime.send (
+  to_jsonb ('{}'::text), -- JSONB Payload
+  'event', -- Event name
+  'topic', -- Topic
+  FALSE -- Public / Private flag
+);
+```
+
+### Broadcast record changes
+
+#### Set up Realtime Authorization
+
+Realtime Authorization is required and enabled by default. To allow your users to listen to messages from topics, create an RLS (Row Level Security) policy:
+
+```sql
+CREATE POLICY "authenticated can receive broadcasts"
+ON "realtime"."messages"
+FOR SELECT
+TO authenticated
+USING ( true );
+```
+
+See the [Realtime Authorization](/docs/guides/realtime/authorization) docs to learn how to set up more specific policies.
+
+#### Set up trigger function
+
+First, set up a trigger function that uses `realtime.broadcast_changes` to insert an event whenever it is triggered. The event includes data on the schema, table, operation, and field changes that triggered it.
+
+For this example, we broadcast events to a topic named `topic:<record_id>`, built from the `id` of the changed row. Because `NEW` is null for `DELETE` operations, the topic falls back to `OLD.id`:
+
+```sql
+CREATE OR REPLACE FUNCTION public.your_table_changes()
+RETURNS trigger
+SECURITY DEFINER SET search_path = ''
+AS $$
+BEGIN
+  PERFORM realtime.broadcast_changes(
+    'topic:' || coalesce(NEW.id, OLD.id)::text, -- topic
+    TG_OP,           -- event
+    TG_OP,           -- operation
+    TG_TABLE_NAME,   -- table
+    TG_TABLE_SCHEMA, -- schema
+    NEW,             -- new record
+    OLD              -- old record
+  );
+  RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
+```
+
+Of note are the native Postgres trigger special variables used:
+
+- `TG_OP` - the operation that triggered the function
+- `TG_TABLE_NAME` - the table that caused the trigger
+- `TG_TABLE_SCHEMA` - the schema of the table that caused the trigger invocation
+- `NEW` - the record after the change (null for `DELETE`)
+- `OLD` - the record before the change (null for `INSERT`)
+
+You can read more about them in this [guide](https://www.postgresql.org/docs/current/plpgsql-trigger.html#PLPGSQL-DML-TRIGGER).
+
+#### Set up trigger
+
+Next, set up a trigger so the function runs whenever your target table has a change.
+
+```sql
+CREATE TRIGGER broadcast_changes_for_your_table_trigger
+AFTER INSERT OR UPDATE OR DELETE ON public.your_table
+FOR EACH ROW
+EXECUTE FUNCTION your_table_changes ();
+```
+
+The trigger fires on all operations, so users will receive events whenever records are inserted, updated, or deleted from `public.your_table`.
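+
+Before wiring up clients, you can sanity-check the trigger from SQL. This example is a sketch: it assumes `public.your_table` has an `id` column and that its remaining columns have defaults; adjust the column list to your schema.
+
+```sql
+-- Fires broadcast_changes_for_your_table_trigger; clients subscribed to
+-- 'topic:<new row id>' should receive an INSERT event.
+INSERT INTO public.your_table DEFAULT VALUES;
+```
+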
+#### Listen on client side
+
+Finally, set up the client side to listen to the topic `topic:<record_id>` to receive the events.
+
+```jsx
+const gameId = 'id'
+await supabase.realtime.setAuth() // Needed for Realtime Authorization
+const changes = supabase
+  .channel(`topic:${gameId}`)
+  .on('broadcast', { event: 'INSERT' }, (payload) => console.log(payload))
+  .on('broadcast', { event: 'UPDATE' }, (payload) => console.log(payload))
+  .on('broadcast', { event: 'DELETE' }, (payload) => console.log(payload))
+  .subscribe()
+```
diff --git a/apps/docs/content/guides/realtime/postgres-changes.mdx b/apps/docs/content/guides/realtime/postgres-changes.mdx
index 395deda2828cd..3cdf5b5a1d686 100644
--- a/apps/docs/content/guides/realtime/postgres-changes.mdx
+++ b/apps/docs/content/guides/realtime/postgres-changes.mdx
@@ -91,8 +91,19 @@ In this example we'll set up a database table, secure it with Row Level Security
 
   Go to your project's [Publications settings](https://supabase.com/dashboard/project/_/database/publications), and under `supabase_realtime`, toggle on the tables you want to listen to.
 
+  Alternatively, add tables to the `supabase_realtime` publication by running the following SQL:
+
+
+
+  ```sql
+  alter publication supabase_realtime
+  add table your_table_name;
+  ```
+
+
+
diff --git a/apps/docs/features/docs/GuidesMdx.utils.tsx b/apps/docs/features/docs/GuidesMdx.utils.tsx
index 6dfce8538849b..86088c497f3e0 100644
--- a/apps/docs/features/docs/GuidesMdx.utils.tsx
+++ b/apps/docs/features/docs/GuidesMdx.utils.tsx
@@ -1,18 +1,20 @@
-import matter from 'gray-matter'
+import * as Sentry from '@sentry/nextjs'
 import { fromMarkdown } from 'mdast-util-from-markdown'
 import { gfmFromMarkdown } from 'mdast-util-gfm'
 import { gfm } from 'micromark-extension-gfm'
 import { type Metadata, type ResolvingMetadata } from 'next'
 import { notFound } from 'next/navigation'
-import { readFile, readdir } from 'node:fs/promises'
-import { extname, join, sep } from 'node:path'
+import { readdir } from 'node:fs/promises'
+import { extname, join, relative, sep } from 'node:path'
+import { extractMessageFromAnyError, FileNotFoundError } from '~/app/api/utils'
 import { pluckPromise } from '~/features/helpers.fn'
 import { cache_fullProcess_withDevCacheBust, existsFile } from '~/features/helpers.fs'
 import type { OrPromise } from '~/features/helpers.types'
 import { generateOpenGraphImageMeta } from '~/features/seo/openGraph'
 import { BASE_PATH } from '~/lib/constants'
 import { GUIDES_DIRECTORY, isValidGuideFrontmatter, type GuideFrontmatter } from '~/lib/docs'
+import { GuideModelLoader } from '~/resources/guide/guideModelLoader'
 import { newEditLink } from './GuidesMdx.template'
 
 const PUBLISHED_SECTIONS = [
@@ -51,30 +53,39 @@ const getGuidesMarkdownInternal = async (slug: string[]) => {
     notFound()
   }
 
-  let mdx: string
   try {
-    mdx = await readFile(fullPath, 'utf-8')
-  } catch {
-    // Not using console.error because this includes pages that are genuine
-    // 404s and clutters up the logs
-    console.log('Error reading Markdown at path: %s', fullPath)
-    notFound()
-  }
+    const guide = (await GuideModelLoader.fromFs(relative(GUIDES_DIRECTORY, fullPath))).unwrap()
+    const content = guide.content ?? ''
+    const meta = guide.metadata ??
{} - const editLink = newEditLink( - `supabase/supabase/blob/master/apps/docs/content/guides/${relPath}.mdx` - ) + if (!isValidGuideFrontmatter(meta)) { + throw Error(`Type of frontmatter is not valid for path: ${fullPath}`) + } - const { data: meta, content } = matter(mdx) - if (!isValidGuideFrontmatter(meta)) { - throw Error('Type of frontmatter is not valid') - } + const editLink = newEditLink( + `supabase/supabase/blob/master/apps/docs/content/guides/${relPath}.mdx` + ) - return { - pathname: `/guides/${slug.join('/')}` satisfies `/${string}`, - meta, - content, - editLink, + return { + pathname: `/guides/${slug.join('/')}` satisfies `/${string}`, + meta, + content, + editLink, + } + } catch (error: unknown) { + if (error instanceof FileNotFoundError) { + // Not using console.error because this includes pages that are genuine + // 404s and clutters up the logs + console.log('Could not read Markdown at path: %s', fullPath) + } else { + console.error( + 'Error processing Markdown file at path: %s:\n\t%s', + fullPath, + extractMessageFromAnyError(error) + ) + Sentry.captureException(error) + } + notFound() } } diff --git a/apps/docs/features/helpers.fn.ts b/apps/docs/features/helpers.fn.ts index 0c6c38da842ca..76f3a7b707b56 100644 --- a/apps/docs/features/helpers.fn.ts +++ b/apps/docs/features/helpers.fn.ts @@ -113,6 +113,11 @@ export class Result { return this as unknown as Result } + async mapAsync(fn: (data: Ok) => Promise): Promise> { + if (this.isOk()) return Result.ok(await fn(this.internal.data!)) + return this as unknown as Result + } + mapError(fn: (error: Error) => MappedError): Result { if (this.isOk()) return this as unknown as Result return Result.error(fn(this.internal.error!)) @@ -147,6 +152,27 @@ export class Result { return this.internal.data! } + unwrapOr(deflt: () => Ok): Ok { + if (this.isOk()) return this.internal.data! + return deflt() + } + + unwrapError(): Error { + if (this.isOk()) { + throw new Error(`UnwrapError called on Ok`) + } + return this.internal.error! 
+ } + + unwrapErrorSafe(): Error | null { + return this.internal.error + } + + unwrapEither(): Ok | Error { + if (this.isOk()) return this.unwrap() + return this.unwrapError() + } + join( other: Result ): Result<[Ok, OtherOk], [Error, OtherError]> { @@ -158,3 +184,47 @@ export class Result { return Result.ok([this.internal.data!, other.internal.data!]) } } + +export class Both { + private internal: { + left: Left + right: Right + } + + constructor(left: Left, right: Right) { + this.internal = { + left, + right, + } + } + + mapLeft(fn: (left: Left) => NewLeft): Both { + return new Both(fn(this.internal.left), this.internal.right) + } + + mapRight(fn: (right: Right) => NewRight): Both { + return new Both(this.internal.left, fn(this.internal.right)) + } + + async mapLeftAsync(fn: (left: Left) => Promise): Promise> { + const res = await fn(this.internal.left) + return new Both(res, this.internal.right) + } + + unwrapLeft(): Left { + return this.internal.left + } + + unwrapRight(): Right { + return this.internal.right + } + + combine(fn: (left: Left, right: Right) => Output): Output { + return fn(this.internal.left, this.internal.right) + } + + intoResult(): Result { + if (this.internal.right) return Result.error(this.internal.right) + return Result.ok(this.internal.left) + } +} diff --git a/apps/docs/features/helpers.types.ts b/apps/docs/features/helpers.types.ts index 252e21955bbaf..45bc7c18ce02b 100644 --- a/apps/docs/features/helpers.types.ts +++ b/apps/docs/features/helpers.types.ts @@ -1,7 +1,5 @@ -type OrPromise = T | Promise +export type Json = string | number | boolean | { [key: string]: Json } | Json[] -type Json = string | number | boolean | { [key: string]: Json } | Json[] +export type OrPromise = T | Promise -type WithRequired = T & { [P in K]-?: T[P] } - -export type { Json, OrPromise, WithRequired } +export type WithRequired = T & { [P in K]-?: T[P] } diff --git a/apps/docs/lib/openAi.ts b/apps/docs/lib/openAi.ts index 61b51f11dd21e..45e361257c249 100644 --- a/apps/docs/lib/openAi.ts +++ b/apps/docs/lib/openAi.ts @@ -10,13 +10,18 @@ import { Result } from '~/features/helpers.fn' type Embedding = Array +export interface EmbeddingWithTokens { + embedding: Embedding + token_count: number +} + interface ModerationFlaggedDetails { flagged: boolean categories: OpenAI.Moderations.Moderation.Categories } export interface OpenAIClientInterface { - createContentEmbedding(text: string): Promise> + createContentEmbedding(text: string): Promise> } let openAIClient: OpenAIClientInterface | null @@ -26,7 +31,9 @@ class OpenAIClient implements OpenAIClientInterface { constructor(private client: OpenAI) {} - async createContentEmbedding(text: string): Promise> { + async createContentEmbedding( + text: string + ): Promise> { return await Result.tryCatchFlat( this.createContentEmbeddingImpl.bind(this), convertUnknownToApiError, @@ -36,7 +43,7 @@ class OpenAIClient implements OpenAIClientInterface { private async createContentEmbeddingImpl( text: string - ): Promise>> { + ): Promise>> { const query = text.trim() const moderationResponse = await this.client.moderations.create({ input: query }) @@ -55,7 +62,12 @@ class OpenAIClient implements OpenAIClientInterface { input: query, }) const [{ embedding: queryEmbedding }] = embeddingsResponse.data - return Result.ok(queryEmbedding) + const tokenCount = embeddingsResponse.usage.total_tokens + + return Result.ok({ + embedding: queryEmbedding, + token_count: tokenCount, + }) } } diff --git a/apps/docs/lib/supabase.ts b/apps/docs/lib/supabase.ts index 
e7c1768a374ba..78f371e4b8525 100644 --- a/apps/docs/lib/supabase.ts +++ b/apps/docs/lib/supabase.ts @@ -1,11 +1,32 @@ import { createClient, type SupabaseClient } from '@supabase/supabase-js' import { type Database as DatabaseGenerated } from 'common' -type Database = { +export type Database = { content: DatabaseGenerated['content'] graphql_public: DatabaseGenerated['graphql_public'] public: { - Tables: DatabaseGenerated['public']['Tables'] + Tables: Omit & { + page_section: Omit< + DatabaseGenerated['public']['Tables']['page_section'], + 'Row' | 'Insert' | 'Update' + > & { + Row: Omit & { + embedding: Array | null + } + Insert: Omit< + DatabaseGenerated['public']['Tables']['page_section']['Insert'], + 'embedding' + > & { + embedding?: Array | null + } + Update: Omit< + DatabaseGenerated['public']['Tables']['page_section']['Update'], + 'embedding' + > & { + embedding?: Array | null + } + } + } Views: DatabaseGenerated['public']['Views'] Functions: Omit< DatabaseGenerated['public']['Functions'], diff --git a/apps/docs/lib/supabaseAdmin.ts b/apps/docs/lib/supabaseAdmin.ts index 334fe3035a5bd..3df3802759e58 100644 --- a/apps/docs/lib/supabaseAdmin.ts +++ b/apps/docs/lib/supabaseAdmin.ts @@ -2,7 +2,7 @@ import 'server-only' import { createClient, type SupabaseClient } from '@supabase/supabase-js' -import { type Database } from 'common' +import { type Database } from '~/lib/supabase' let supabaseAdminClient: SupabaseClient | null = null diff --git a/apps/docs/package.json b/apps/docs/package.json index ae1416285242f..82594065ea1db 100644 --- a/apps/docs/package.json +++ b/apps/docs/package.json @@ -6,7 +6,7 @@ "scripts": { "build": "next build", "build:analyze": "ANALYZE=true next build", - "build:llms": "tsx ./scripts/llms.ts", + "build:llms": "tsx --conditions=react-server ./scripts/llms.ts", "build:sitemap": "tsx ./internals/generate-sitemap.ts", "clean": "rimraf .next .turbo node_modules features/docs/generated examples __generated__", "codegen:examples": "shx cp -r ../../examples ./examples", @@ -16,7 +16,7 @@ "dev": "concurrently --kill-others \"next dev --port 3001\" \"pnpm run dev:watch:troubleshooting\"", "dev:secrets:pull": "AWS_PROFILE=supabase-dev node ../../scripts/getSecrets.js -n local/docs", "dev:watch:troubleshooting": "node ./scripts/troubleshooting/watch.mjs", - "embeddings": "tsx scripts/search/generate-embeddings.ts", + "embeddings": "tsx --conditions=react-server scripts/search/generate-embeddings.ts", "embeddings:refresh": "pnpm run embeddings --refresh", "last-changed": "tsx scripts/last-changed.ts", "last-changed:reset": "pnpm run last-changed -- --reset", @@ -138,6 +138,7 @@ "@types/react-copy-to-clipboard": "^5.0.4", "@types/react-dom": "catalog:", "@types/unist": "^2.0.6", + "@types/uuid": "^10.0.0", "api-types": "workspace:*", "cheerio": "^1.0.0-rc.12", "chokidar": "^4.0.3", diff --git a/apps/docs/public/humans.txt b/apps/docs/public/humans.txt index 7c8e85fa134b3..bcfefdf753377 100644 --- a/apps/docs/public/humans.txt +++ b/apps/docs/public/humans.txt @@ -86,6 +86,7 @@ Luca Forstner Manan Gupta Margarita Sandomirskaia Mark Burggraf +Matt Rossman Monica Khoury Mykhailo Mischa Lieibenson Nyannyacha diff --git a/apps/docs/resources/globalSearch/globalSearchModel.ts b/apps/docs/resources/globalSearch/globalSearchModel.ts index 8c63f28f49374..c6243d530c9a3 100644 --- a/apps/docs/resources/globalSearch/globalSearchModel.ts +++ b/apps/docs/resources/globalSearch/globalSearchModel.ts @@ -22,7 +22,7 @@ export abstract class SearchResultModel { const includeFullContent = 
requestedFields.includes('content') const embeddingResult = await openAI().createContentEmbedding(query) - return embeddingResult.flatMapAsync(async (embedding) => { + return embeddingResult.flatMapAsync(async ({ embedding }) => { const matchResult = new Result( await supabase().rpc('search_content', { embedding, @@ -49,7 +49,7 @@ export abstract class SearchResultModel { const includeFullContent = requestedFields.includes('content') const embeddingResult = await openAI().createContentEmbedding(query) - return embeddingResult.flatMapAsync(async (embedding) => { + return embeddingResult.flatMapAsync(async ({ embedding }) => { const matchResult = new Result( await supabase().rpc('search_content_hybrid', { query_text: query, diff --git a/apps/docs/resources/guide/guideModel.ts b/apps/docs/resources/guide/guideModel.ts index 5201d7a9cc024..dbdfff02aea53 100644 --- a/apps/docs/resources/guide/guideModel.ts +++ b/apps/docs/resources/guide/guideModel.ts @@ -3,23 +3,31 @@ import { type SearchResultInterface } from '../globalSearch/globalSearchInterfac export class GuideModel implements SearchResultInterface { public title?: string public href?: string + public checksum?: string public content?: string + public metadata?: Record public subsections: Array constructor({ title, href, + checksum, content, + metadata, subsections, }: { title?: string href?: string + checksum?: string content?: string + metadata?: Record subsections?: Array<{ title?: string; href?: string; content?: string }> }) { this.title = title this.href = href + this.checksum = checksum this.content = content + this.metadata = metadata this.subsections = subsections?.map((subsection) => new SubsectionModel(subsection)) ?? [] } } diff --git a/apps/docs/resources/guide/guideModelLoader.ts b/apps/docs/resources/guide/guideModelLoader.ts new file mode 100644 index 0000000000000..4ba9674de1ef3 --- /dev/null +++ b/apps/docs/resources/guide/guideModelLoader.ts @@ -0,0 +1,201 @@ +import matter from 'gray-matter' +import { promises as fs } from 'node:fs' +import { join, relative } from 'node:path' + +import { extractMessageFromAnyError, FileNotFoundError, MultiError } from '~/app/api/utils' +import { preprocessMdxWithDefaults } from '~/features/directives/utils' +import { Both, Result } from '~/features/helpers.fn' +import { GUIDES_DIRECTORY } from '~/lib/docs' +import { processMdx } from '~/scripts/helpers.mdx' +import { GuideModel } from './guideModel' + +/** + * Determines if a file is hidden. + * + * A file is hidden if its name, or the name of any of its parent directories, + * starts with an underscore. + */ +function isHiddenFile(path: string): boolean { + return path.split('/').some((part) => part.startsWith('_')) +} + +/** + * Recursively walks a directory and collects all .mdx files that are not hidden. 
+ */ +async function walkMdxFiles( + dir: string, + multiError: { current: MultiError | null } +): Promise> { + const readDirResult = await Result.tryCatch( + () => fs.readdir(dir, { recursive: true }), + (error) => error + ) + + return readDirResult.match( + (allPaths) => { + const mdxFiles: string[] = [] + + for (const relativePath of allPaths) { + if (isHiddenFile(relativePath)) { + continue + } + + // Only include .mdx files + if (relativePath.endsWith('.mdx')) { + mdxFiles.push(join(dir, relativePath)) + } + } + + return mdxFiles + }, + (error) => { + // If we can't read the directory, add it to the error collection + ;(multiError.current ??= new MultiError('Failed to load some guides:')).appendError( + `Failed to read directory ${dir}: ${extractMessageFromAnyError(error)}`, + error + ) + return [] + } + ) +} + +/** + * Node.js-specific loader for GuideModel instances from the filesystem. + * This class contains all the filesystem operations that require Node.js capabilities. + */ +export class GuideModelLoader { + /** + * Creates a GuideModel instance by loading and processing a markdown file from the filesystem. + * + * @param relPath - Relative path to the markdown file within the guides directory (e.g., "auth/users.mdx") + * @returns A Result containing either the processed GuideModel or an error message + * + * @example + * ```typescript + * const result = await GuideModelLoader.fromFs('auth/users.mdx') + * result.match( + * (guide) => console.log(guide.title, guide.subsections.length), + * (error) => console.error(error) + * ) + * ``` + */ + static async fromFs(relPath: string): Promise> { + return Result.tryCatch( + async () => { + // Read the markdown file from the guides directory + const filePath = join(GUIDES_DIRECTORY, relPath) + const fileContent = await fs.readFile(filePath, 'utf-8') + + // Parse frontmatter using gray-matter + const { data: metadata, content: rawContent } = matter(fileContent) + + // Replace partials and code samples using directives + const processedContent = await preprocessMdxWithDefaults(rawContent) + + // Process MDX to get chunked sections for embedding + const { sections } = await processMdx(processedContent) + + // Create subsections from the chunked sections + const subsections = sections.map((section) => ({ + title: section.heading, + href: section.slug, + content: section.content, + })) + + // Extract title from metadata or first heading + const title = metadata.title || sections.find((s) => s.heading)?.heading + + // Create href from relative path (remove .mdx extension) + const href = `https://supabase.com/docs/guides/${relPath.replace(/\.mdx?$/, '')}` + + return new GuideModel({ + title, + href, + content: processedContent, + metadata, + subsections, + }) + }, + (error) => { + if (error instanceof Error && 'code' in error && error.code === 'ENOENT') { + return new FileNotFoundError('', error) + } + return new Error( + `Failed to load guide from ${relPath}: ${extractMessageFromAnyError(error)}`, + { + cause: error, + } + ) + } + ) + } + + /** + * Loads GuideModels from a list of file paths in parallel, collecting any + * errors without stopping. 
+ */ + private static async loadGuides( + filePaths: Array, + multiError: { current: MultiError | null } + ): Promise> { + const loadPromises = filePaths.map(async (filePath) => { + const relPath = relative(GUIDES_DIRECTORY, filePath) + return this.fromFs(relPath) + }) + + const results = await Promise.all(loadPromises) + const guides: Array = [] + + results.forEach((result, index) => { + const relPath = relative(GUIDES_DIRECTORY, filePaths[index]) + + result.match( + (guide) => guides.push(guide), + (error) => { + ;(multiError.current ??= new MultiError('Failed to load some guides:')).appendError( + `Failed to load ${relPath}: ${extractMessageFromAnyError(error)}`, + error + ) + } + ) + }) + + return guides + } + + /** + * Loads all guide models from the filesystem by walking the content directory. + * + * This method recursively walks the guides directory (or a specific section + * subdirectory) and loads all non-hidden .mdx files. + * + * If errors occur while loading individual files, they are collected but + * don't prevent other files from loading. + * + * @param section - Optional section name to limit walking to a specific + * subdirectory (e.g., "database", "auth") + * @returns A Both containing [successful GuideModels, MultiError with all + * failures or null if no errors] + * + * @example + * ```typescript + * // Load all guides + * const guides = (await GuideModelLoader.allFromFs()).unwrapLeft() + * + * // Load only database guides + * const dbGuides = (await GuideModelLoader.allFromFs('database')).unwrapLeft() + * ``` + */ + static async allFromFs(section?: string): Promise, MultiError | null>> { + const searchDir = section ? join(GUIDES_DIRECTORY, section) : GUIDES_DIRECTORY + const multiError = { current: null as MultiError | null } + + // Get all .mdx files in the search directory + const mdxFiles = await walkMdxFiles(searchDir, multiError) + + // Load each file and collect results + const guides = await this.loadGuides(mdxFiles, multiError) + + return new Both(guides, multiError.current) + } +} diff --git a/apps/docs/scripts/helpers.mdx.ts b/apps/docs/scripts/helpers.mdx.ts index c6d9ce672ef55..b71d01d8a093c 100644 --- a/apps/docs/scripts/helpers.mdx.ts +++ b/apps/docs/scripts/helpers.mdx.ts @@ -1,17 +1,12 @@ -import { createHash } from 'node:crypto' -import { ObjectExpression } from 'estree' import GithubSlugger from 'github-slugger' import matter from 'gray-matter' import { type Content, type Root } from 'mdast' import { fromMarkdown } from 'mdast-util-from-markdown' import { toMarkdown } from 'mdast-util-to-markdown' -import { mdxFromMarkdown, type MdxjsEsm } from 'mdast-util-mdx' +import { mdxFromMarkdown, mdxToMarkdown } from 'mdast-util-mdx' import { toString } from 'mdast-util-to-string' import { mdxjs } from 'micromark-extension-mdxjs' import { u } from 'unist-builder' -import { filter } from 'unist-util-filter' - -type Json = Record type Section = { content: string @@ -21,17 +16,25 @@ type Section = { export type ProcessedMdx = { checksum: string - meta: Json + meta: Record sections: Section[] } +async function createHash(content: string): Promise { + const encoder = new TextEncoder() + const data = encoder.encode(content) + const hashBuffer = await crypto.subtle.digest('SHA-256', data) + const hashArray = Array.from(new Uint8Array(hashBuffer)) + return hashArray.map((byte) => byte.toString(16).padStart(2, '0')).join('') +} + /** * Process MDX content. * * Splits MDX content into sections based on headings, and calculates checksum. 
*/ -function processMdx(content: string, options?: { yaml?: boolean }): ProcessedMdx { - const checksum = createHash('sha256').update(content).digest('base64') +async function processMdx(content: string, options?: { yaml?: boolean }): Promise { + const checksum = await createHash(content) let frontmatter: Record = {} if (options?.yaml) { @@ -45,43 +48,15 @@ function processMdx(content: string, options?: { yaml?: boolean }): ProcessedMdx mdastExtensions: [mdxFromMarkdown()], }) - let meta: Record | undefined - if (options?.yaml) { - meta = frontmatter - } else { - meta = extractMetaExport(mdxTree) - } - - const serializableMeta: Json = meta && JSON.parse(JSON.stringify(meta)) - - // Remove all MDX elements from markdown - const mdTree = filter( - mdxTree, - (node) => - ![ - 'mdxjsEsm', - 'mdxJsxFlowElement', - 'mdxJsxTextElement', - 'mdxFlowExpression', - 'mdxTextExpression', - ].includes(node.type) - ) - - if (!mdTree) { - return { - checksum, - meta: serializableMeta, - sections: [], - } - } - - const sectionTrees = splitTreeBy(mdTree, (node) => node.type === 'heading') + const sectionTrees = splitTreeBy(mdxTree, (node) => node.type === 'heading') const slugger = new GithubSlugger() const sections = sectionTrees.map((tree) => { const [firstNode] = tree.children - const content = toMarkdown(tree) + const content = toMarkdown(tree, { + extensions: [mdxToMarkdown()], + }) const rawHeading: string | undefined = firstNode.type === 'heading' ? toString(firstNode) : undefined @@ -103,74 +78,11 @@ function processMdx(content: string, options?: { yaml?: boolean }): ProcessedMdx return { checksum, - meta: serializableMeta, sections, + meta: frontmatter, } } -/** - * Extracts the `meta` ESM export from the MDX file. - * - * This info is akin to frontmatter. - */ -function extractMetaExport(mdxTree: Root) { - const metaExportNode = mdxTree.children.find((node): node is MdxjsEsm => { - return ( - node.type === 'mdxjsEsm' && - node.data?.estree?.body[0]?.type === 'ExportNamedDeclaration' && - node.data.estree.body[0].declaration?.type === 'VariableDeclaration' && - node.data.estree.body[0].declaration.declarations[0]?.id.type === 'Identifier' && - node.data.estree.body[0].declaration.declarations[0].id.name === 'meta' - ) - }) - - if (!metaExportNode) { - return undefined - } - - const objectExpression = - (metaExportNode.data?.estree?.body[0]?.type === 'ExportNamedDeclaration' && - metaExportNode.data.estree.body[0].declaration?.type === 'VariableDeclaration' && - metaExportNode.data.estree.body[0].declaration.declarations[0]?.id.type === 'Identifier' && - metaExportNode.data.estree.body[0].declaration.declarations[0].id.name === 'meta' && - metaExportNode.data.estree.body[0].declaration.declarations[0].init?.type === - 'ObjectExpression' && - metaExportNode.data.estree.body[0].declaration.declarations[0].init) || - undefined - - if (!objectExpression) { - return undefined - } - - return getObjectFromExpression(objectExpression) -} - -/** - * Extracts ES literals from an `estree` `ObjectExpression` - * into a plain JavaScript object. 
- */ -function getObjectFromExpression(node: ObjectExpression) { - return node.properties.reduce< - Record - >((object, property) => { - if (property.type !== 'Property') { - return object - } - - const key = (property.key.type === 'Identifier' && property.key.name) || undefined - const value = (property.value.type === 'Literal' && property.value.value) || undefined - - if (!key) { - return object - } - - return { - ...object, - [key]: value, - } - }, {}) -} - /** * Splits a `mdast` tree into multiple trees based on * a predicate function. Will include the splitting node @@ -210,4 +122,4 @@ function parseHeading(heading: string): { heading: string; customAnchor?: string } export { processMdx } -export type { Json, Section } +export type { Section } diff --git a/apps/docs/scripts/search/generate-embeddings.ts b/apps/docs/scripts/search/generate-embeddings.ts index 288c86b35e8b0..d8a7bfdf90920 100644 --- a/apps/docs/scripts/search/generate-embeddings.ts +++ b/apps/docs/scripts/search/generate-embeddings.ts @@ -4,7 +4,7 @@ import { createClient } from '@supabase/supabase-js' import { parseArgs } from 'node:util' import { OpenAI } from 'openai' import { v4 as uuidv4 } from 'uuid' -import type { Json, Section } from '../helpers.mdx.js' +import type { Section } from '../helpers.mdx.js' import { fetchAllSources } from './sources/index.js' const args = parseArgs({ @@ -78,8 +78,8 @@ async function generateEmbeddings() { checksum: string sections: Section[] ragIgnore?: boolean - meta?: Json - } = embeddingSource.process() + meta?: Record + } = await embeddingSource.process() // Check for existing page in DB and compare checksums const { error: fetchPageError, data: existingPage } = await supabaseClient diff --git a/apps/docs/scripts/search/sources/base.ts b/apps/docs/scripts/search/sources/base.ts index 32b5aad9e9cdc..a7835ccd924cd 100644 --- a/apps/docs/scripts/search/sources/base.ts +++ b/apps/docs/scripts/search/sources/base.ts @@ -1,4 +1,4 @@ -import type { Json, Section } from '../../helpers.mdx.js' +import type { Section } from '../../helpers.mdx.js' export abstract class BaseLoader { type: string @@ -14,7 +14,7 @@ export abstract class BaseLoader { export abstract class BaseSource { type: string checksum?: string - meta?: Json + meta?: Record sections?: Section[] constructor( @@ -22,7 +22,12 @@ export abstract class BaseSource { public path: string ) {} - abstract process(): { checksum: string; meta?: Json; ragIgnore?: boolean; sections: Section[] } + abstract process(): Promise<{ + checksum: string + meta?: Record + ragIgnore?: boolean + sections: Section[] + }> abstract extractIndexedContent(): string } diff --git a/apps/docs/scripts/search/sources/github-discussion.ts b/apps/docs/scripts/search/sources/github-discussion.ts index fe3db305ef2c8..fc29ce7e7c8a5 100644 --- a/apps/docs/scripts/search/sources/github-discussion.ts +++ b/apps/docs/scripts/search/sources/github-discussion.ts @@ -104,7 +104,7 @@ export class GitHubDiscussionSource extends BaseSource { super(source, path) } - process() { + async process() { const { id, title, updatedAt, body, databaseId } = this.discussion const checksum = createHash('sha256').update(updatedAt).digest('base64') diff --git a/apps/docs/scripts/search/sources/lint-warnings-guide.ts b/apps/docs/scripts/search/sources/lint-warnings-guide.ts index 77713504e0109..b329f0be819ea 100644 --- a/apps/docs/scripts/search/sources/lint-warnings-guide.ts +++ b/apps/docs/scripts/search/sources/lint-warnings-guide.ts @@ -92,7 +92,7 @@ export class 
LintWarningsGuideSource extends BaseSource { super(source, path) } - process() { + async process() { this.checksum = createHash('sha256').update(this.lint.content).digest('base64') this.meta = { diff --git a/apps/docs/scripts/search/sources/markdown.ts b/apps/docs/scripts/search/sources/markdown.ts index 74b5b8fe25599..0c6f759d6e317 100644 --- a/apps/docs/scripts/search/sources/markdown.ts +++ b/apps/docs/scripts/search/sources/markdown.ts @@ -1,5 +1,5 @@ -import { readFile } from 'fs/promises' -import { processMdx } from '../../helpers.mdx.js' +import { SubsectionModel } from '../../../resources/guide/guideModel.js' +import { GuideModelLoader } from '../../../resources/guide/guideModelLoader.js' import { BaseLoader, BaseSource } from './base.js' export class MarkdownLoader extends BaseLoader { @@ -15,8 +15,22 @@ export class MarkdownLoader extends BaseLoader { } async load() { - const contents = await readFile(this.filePath, 'utf8') - return [new MarkdownSource(this.source, this.path, contents, this.options)] + const guide = ( + await GuideModelLoader.fromFs(this.filePath.replace(/^content\/guides/, '')) + ).unwrap() + return [ + new MarkdownSource( + this.source, + this.path, + guide.content ?? '', + { + checksum: guide.checksum, + meta: guide.metadata, + sections: guide.subsections, + }, + this.options + ), + ] } } @@ -27,19 +41,29 @@ export class MarkdownSource extends BaseSource { source: string, path: string, public contents: string, + { + checksum, + meta, + sections, + }: { checksum?: string; meta?: Record; sections: Array }, public options?: { yaml?: boolean } ) { super(source, path) - } - - process() { - const { checksum, meta, sections } = processMdx(this.contents, this.options) - this.checksum = checksum - this.meta = meta - this.sections = sections + this.meta = meta ?? {} + this.sections = sections.map((section) => ({ + content: section.content ?? '', + heading: section.title, + slug: section.href, + })) + } - return { checksum, meta, sections } + async process() { + return { + checksum: this.checksum ?? '', + meta: this.meta, + sections: this.sections ?? 
[], + } } extractIndexedContent(): string { diff --git a/apps/docs/scripts/search/sources/partner-integrations.ts b/apps/docs/scripts/search/sources/partner-integrations.ts index 8370d414d0960..ad9964722e4e5 100644 --- a/apps/docs/scripts/search/sources/partner-integrations.ts +++ b/apps/docs/scripts/search/sources/partner-integrations.ts @@ -57,8 +57,8 @@ export class IntegrationSource extends BaseSource { super(source, path) } - process() { - const { checksum, sections } = processMdx(this.partnerData.overview) + async process() { + const { checksum, sections } = await processMdx(this.partnerData.overview) const meta = { title: upperFirst(this.partnerData.slug), subtitle: 'Integration', diff --git a/apps/docs/scripts/search/sources/reference-doc.ts b/apps/docs/scripts/search/sources/reference-doc.ts index c8c54211fa76c..68a28e5e418f1 100644 --- a/apps/docs/scripts/search/sources/reference-doc.ts +++ b/apps/docs/scripts/search/sources/reference-doc.ts @@ -12,7 +12,6 @@ import { getApiEndpointById } from '../../../features/docs/Reference.generated.s import type { CliCommand, CliSpec } from '../../../generator/types/CliSpec.js' import { flattenSections } from '../../../lib/helpers.js' import { enrichedOperation, gen_v3 } from '../../../lib/refGenerator/helpers.js' -import type { Json } from '../../helpers.mdx.js' import { BaseLoader, BaseSource } from './base.js' export abstract class ReferenceLoader extends BaseLoader { @@ -24,7 +23,7 @@ export abstract class ReferenceLoader extends BaseLoader { constructor( source: string, path: string, - public meta: Json, + public meta: Record, public specFilePath: string, public sectionsFilePath: string ) { @@ -68,7 +67,7 @@ export abstract class ReferenceLoader extends BaseLoader { specSections: SpecSection[], id: string ): SpecSection | undefined | Promise - enhanceMeta(_section: SpecSection): Json { + enhanceMeta(_section: SpecSection): Record { return this.meta } } @@ -81,12 +80,12 @@ export abstract class ReferenceSource extends BaseSource { path: string, public refSection: ICommonSection, public specSection: SpecSection, - public meta: Json + public meta: Record ) { super(source, path) } - process() { + async process() { const checksum = createHash('sha256') .update(JSON.stringify(this.refSection) + JSON.stringify(this.specSection)) .digest('base64') @@ -125,7 +124,7 @@ export class OpenApiReferenceLoader extends ReferenceLoader, specFilePath: string, sectionsFilePath: string ) { @@ -251,7 +250,7 @@ export class ClientLibReferenceLoader extends ReferenceLoader, specFilePath: string, sectionsFilePath: string ) { @@ -272,7 +271,7 @@ export class ClientLibReferenceLoader extends ReferenceLoader functionDefinition.id === id) } - enhanceMeta(section: IFunctionDefinition): Json { + enhanceMeta(section: IFunctionDefinition): Record { return { ...this.meta, slug: section.id, methodName: section.title } } } @@ -311,7 +310,7 @@ export class CliReferenceLoader extends ReferenceLoader { constructor( source: string, path: string, - meta: Json, + meta: Record, specFilePath: string, sectionsFilePath: string ) { diff --git a/apps/studio/components/grid/components/grid/Grid.tsx b/apps/studio/components/grid/components/grid/Grid.tsx index 1fb1d700460f2..ffc35913baeea 100644 --- a/apps/studio/components/grid/components/grid/Grid.tsx +++ b/apps/studio/components/grid/components/grid/Grid.tsx @@ -1,5 +1,6 @@ import { forwardRef, memo, Ref, useRef } from 'react' import DataGrid, { CalculatedColumn, DataGridHandle } from 'react-data-grid' +import { ref as valtioRef } 
from 'valtio' import { handleCopyCell } from 'components/grid/SupabaseGrid.utils' import { formatForeignKeys } from 'components/interfaces/TableGridEditor/SidePanelEditor/ForeignKeySelector/ForeignKeySelector.utils' @@ -9,6 +10,7 @@ import { ENTITY_TYPE } from 'data/entity-types/entity-type-constants' import { useSendEventMutation } from 'data/telemetry/send-event-mutation' import { useSelectedOrganizationQuery } from 'hooks/misc/useSelectedOrganization' import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' +import { useCsvFileDrop } from 'hooks/ui/useCsvFileDrop' import { useTableEditorStateSnapshot } from 'state/table-editor' import { useTableEditorTableStateSnapshot } from 'state/table-editor-table' import { Button, cn } from 'ui' @@ -72,8 +74,25 @@ export const Grid = memo( const table = snap.table const tableEntityType = snap.originalTable?.entity_type + const isForeignTable = tableEntityType === ENTITY_TYPE.FOREIGN_TABLE + const isTableEmpty = (rows ?? []).length === 0 const { mutate: sendEvent } = useSendEventMutation() + + const { isDraggedOver, onDragOver, onFileDrop } = useCsvFileDrop({ + enabled: isTableEmpty && !isForeignTable, + onFileDropped: (file) => tableEditorSnap.onImportData(valtioRef(file)), + onTelemetryEvent: (eventName) => { + sendEvent({ + action: eventName, + groups: { + project: project?.ref ?? 'Unknown', + organization: org?.slug ?? 'Unknown', + }, + }) + }, + }) + const { data } = useForeignKeyConstraintsQuery({ projectRef: project?.ref, connectionString: project?.connectionString, @@ -115,8 +134,15 @@ export const Grid = memo( return (
{/* Render no rows fallback outside of the DataGrid */} {(rows ?? []).length === 0 && ( @@ -143,7 +169,9 @@ export const Grid = memo( <> {(filters ?? []).length === 0 ? (
-

This table is empty

+

+ {isDraggedOver ? 'Drop your CSV file here' : 'This table is empty'} +

{tableEntityType === ENTITY_TYPE.FOREIGN_TABLE ? (

@@ -152,25 +180,30 @@ export const Grid = memo(

) : ( -
- -
+ !isDraggedOver && ( +
+ +

+ or drag and drop a CSV file here +

+
+ ) )}
) : ( diff --git a/apps/studio/components/interfaces/Auth/MfaAuthSettingsForm/MfaAuthSettingsForm.tsx b/apps/studio/components/interfaces/Auth/MfaAuthSettingsForm/MfaAuthSettingsForm.tsx index 90a1c353c4917..674753ee1514d 100644 --- a/apps/studio/components/interfaces/Auth/MfaAuthSettingsForm/MfaAuthSettingsForm.tsx +++ b/apps/studio/components/interfaces/Auth/MfaAuthSettingsForm/MfaAuthSettingsForm.tsx @@ -35,6 +35,7 @@ import { WarningIcon, } from 'ui' import { FormItemLayout } from 'ui-patterns/form/FormItemLayout/FormItemLayout' +import ConfirmationModal from 'ui-patterns/Dialogs/ConfirmationModal' function determineMFAStatus(verifyEnabled: boolean, enrollEnabled: boolean) { return verifyEnabled ? (enrollEnabled ? 'Enabled' : 'Verify Enabled') : 'Disabled' @@ -87,6 +88,8 @@ const MfaAuthSettingsForm = () => { const [isUpdatingTotpForm, setIsUpdatingTotpForm] = useState(false) const [isUpdatingPhoneForm, setIsUpdatingPhoneForm] = useState(false) + const [isConfirmationModalVisible, setIsConfirmationModalVisible] = useState(false) + const { can: canReadConfig } = useAsyncCheckProjectPermissions( PermissionAction.READ, 'custom_config_gotrue' @@ -228,6 +231,14 @@ const MfaAuthSettingsForm = () => { const hasUpgradedPhoneMFA = authConfig && !authConfig.MFA_PHONE_VERIFY_ENABLED && phoneMFAIsEnabled + const maybeConfirmPhoneMFAOrSubmit = () => { + if (hasUpgradedPhoneMFA) { + setIsConfirmationModalVisible(true) + } else { + phoneForm.handleSubmit(onSubmitPhoneForm)() + } + } + return ( <> @@ -321,7 +332,13 @@ const MfaAuthSettingsForm = () => { SMS MFA -
+ { + e.preventDefault() + maybeConfirmPhoneMFAOrSubmit() + }} + className="space-y-4" + > {promptProPlanUpgrade && ( @@ -420,20 +437,6 @@ const MfaAuthSettingsForm = () => { /> - {hasUpgradedPhoneMFA && ( - - - - - Enabling advanced MFA with phone will result in an additional charge of{' '} - $75 - per month for the first project in the organization and an additional{' '} - $10 per month for additional projects. - - - - )} - {phoneForm.formState.isDirty && ( -
, - { duration: Infinity } - ) - } else { + // Process a file into table rows and columns (used for both upload and drop) + const processFile = useCallback( + async (file: File) => { updateEditorDirty(true) setUploadedFile(file) + setParseProgress(0) + const { headers, rowCount, columnTypeMap, errors, previewRows } = await parseSpreadsheet( file, onProgressUpdate ) + if (errors.length > 0) { toast.error( `Some issues have been detected on ${errors.length} rows. More details below the content preview.` @@ -115,17 +98,31 @@ const SpreadsheetImport = ({ setErrors(errors) setSelectedHeaders(headers) setSpreadsheetData({ headers, rows: previewRows, rowCount, columnTypeMap }) - } - event.target.value = '' - } + }, + [updateEditorDirty] + ) - const resetSpreadsheetImport = () => { + // Handle file upload events from file input + const onFileUpload = useCallback( + async (event: any) => { + event.persist() + const [file] = event.target.files || event.dataTransfer.files + if (file && !flagInvalidFileImport(file)) { + await processFile(file) + } else { + event.target.value = '' + } + }, + [processFile] + ) + + const resetSpreadsheetImport = useCallback(() => { setInput('') setSpreadsheetData(EMPTY_SPREADSHEET_DATA) setUploadedFile(undefined) setErrors([]) updateEditorDirty(false) - } + }, [updateEditorDirty]) const readSpreadsheetText = async (text: string) => { if (text.length > 0) { @@ -143,6 +140,7 @@ const SpreadsheetImport = ({ } } + // eslint-disable-next-line react-hooks/exhaustive-deps const handler = useCallback(debounce(readSpreadsheetText, debounceDuration), []) const onInputChange = (event: any) => { setInput(event.target.value) @@ -178,8 +176,18 @@ const SpreadsheetImport = ({ } useEffect(() => { - if (visible && headers.length === 0) resetSpreadsheetImport() - }, [visible]) + if (visiblityChanged && visible) { + if (fileFromState) processFile(fileFromState) + else if (headers.length === 0) resetSpreadsheetImport() + } + }, [ + visiblityChanged, + visible, + fileFromState, + processFile, + headers.length, + resetSpreadsheetImport, + ]) return ( { const ext = file?.name.split('.').pop().toLowerCase() return UPLOAD_FILE_EXTENSIONS.includes(ext) } + +export function flagInvalidFileImport(file: File): boolean { + if (!file || !UPLOAD_FILE_TYPES.includes(file.type) || !acceptedFileExtension(file)) { + toast.error("Couldn't import file: only CSV files are accepted") + return true + } else if (file.size > MAX_TABLE_EDITOR_IMPORT_CSV_SIZE) { + toast.error( +
+

The dashboard currently only supports importing CSVs below 100MB.

+

For bulk data loading, we recommend doing so directly through the database.

+ +
, + { duration: Infinity } + ) + return true + } + + return false +} diff --git a/apps/studio/components/layouts/LogsLayout/LogsLayout.tsx b/apps/studio/components/layouts/LogsLayout/LogsLayout.tsx index 2edee7ba163a5..30c4ae8bbf4d9 100644 --- a/apps/studio/components/layouts/LogsLayout/LogsLayout.tsx +++ b/apps/studio/components/layouts/LogsLayout/LogsLayout.tsx @@ -1,11 +1,9 @@ import { PermissionAction } from '@supabase/shared-types/out/constants' -import { useRouter } from 'next/router' -import { PropsWithChildren, useEffect } from 'react' -import { LOCAL_STORAGE_KEYS } from 'common' +import { PropsWithChildren } from 'react' + import NoPermission from 'components/ui/NoPermission' import { useAsyncCheckProjectPermissions } from 'hooks/misc/useCheckPermissions' -import { useLocalStorageQuery } from 'hooks/misc/useLocalStorage' import { withAuth } from 'hooks/misc/withAuth' import ProjectLayout from '../ProjectLayout/ProjectLayout' import { LogsSidebarMenuV2 } from './LogsSidebarMenuV2' @@ -20,21 +18,6 @@ const LogsLayout = ({ title, children }: PropsWithChildren) => 'logflare' ) - const router = useRouter() - const [_, setLastLogsPage] = useLocalStorageQuery( - LOCAL_STORAGE_KEYS.LAST_VISITED_LOGS_PAGE, - router.pathname.split('/logs/')[1] || '' - ) - - useEffect(() => { - if (router.pathname.includes('/logs/')) { - const path = router.pathname.split('/logs/')[1] - if (path) { - setLastLogsPage(path) - } - } - }, [router, setLastLogsPage]) - if (!canUseLogsExplorer) { if (isLoading) { return diff --git a/apps/studio/components/layouts/ProjectLayout/NavigationBar/NavigationBar.utils.tsx b/apps/studio/components/layouts/ProjectLayout/NavigationBar/NavigationBar.utils.tsx index 48faa8be67357..503d3c8d97329 100644 --- a/apps/studio/components/layouts/ProjectLayout/NavigationBar/NavigationBar.utils.tsx +++ b/apps/studio/components/layouts/ProjectLayout/NavigationBar/NavigationBar.utils.tsx @@ -116,10 +116,16 @@ export const generateProductRoutes = ( ] } -export const generateOtherRoutes = (ref?: string, project?: Project, features?: {}): Route[] => { +export const generateOtherRoutes = ( + ref?: string, + project?: Project, + features?: { unifiedLogs?: boolean } +): Route[] => { const isProjectBuilding = project?.status === PROJECT_STATUS.COMING_UP const buildingUrl = `/project/${ref}` + const unifiedLogsEnabled = features?.unifiedLogs ?? false + return [ { key: 'advisors', @@ -141,7 +147,13 @@ export const generateOtherRoutes = (ref?: string, project?: Project, features?: key: 'logs', label: 'Logs', icon: , - link: ref && (isProjectBuilding ? buildingUrl : `/project/${ref}/logs`), + link: + ref && + (isProjectBuilding + ? buildingUrl + : unifiedLogsEnabled + ? `/project/${ref}/logs` + : `/project/${ref}/logs/explorer`), }, { key: 'api', diff --git a/apps/studio/components/ui/Forms/FormSection.tsx b/apps/studio/components/ui/Forms/FormSection.tsx index c5b9881ffe364..3622bb9b814f1 100644 --- a/apps/studio/components/ui/Forms/FormSection.tsx +++ b/apps/studio/components/ui/Forms/FormSection.tsx @@ -65,11 +65,13 @@ const Shimmer = () => ( const FormSectionContent = ({ children, loading = true, + loaders, fullWidth, className, }: { children: React.ReactNode | string loading?: boolean + loaders?: number fullWidth?: boolean className?: string }) => { @@ -81,7 +83,11 @@ const FormSectionContent = ({ ${className} `} > - {loading ? Children.map(children, () => ) : children} + {loading + ? !!loaders + ? 
new Array(loaders).fill(0).map((_, idx) => ) + : Children.map(children, (_, idx) => ) + : children} ) } diff --git a/apps/studio/data/reports/database-charts.ts b/apps/studio/data/reports/database-charts.ts index 0efc542aedddd..2f4eb36e21df2 100644 --- a/apps/studio/data/reports/database-charts.ts +++ b/apps/studio/data/reports/database-charts.ts @@ -3,8 +3,17 @@ import { ReportAttributes } from 'components/ui/Charts/ComposedChart.utils' import { formatBytes } from 'lib/helpers' import { Organization } from 'types' import { Project } from '../projects/project-detail-query' +import { DiskAttributesData } from '../config/disk-attributes-query' +import { MaxConnectionsData } from '../database/max-connections-query' +import { PgbouncerConfigData } from '../database/pgbouncer-config-query' -export const getReportAttributes = (org: Organization, project: Project): ReportAttributes[] => { +export const getReportAttributes = ( + org: Organization, + project: Project, + diskConfig?: DiskAttributesData, + maxConnections?: MaxConnectionsData, + poolerConfig?: PgbouncerConfigData +): ReportAttributes[] => { const computeSize = project?.infra_compute_size || 'medium' return [ @@ -117,7 +126,7 @@ export const getReportAttributes = (org: Organization, project: Project): Report attribute: 'disk_iops_max', provider: 'reference-line', label: 'Max IOPS', - value: getIOPSLimits(computeSize), + value: diskConfig?.attributes?.iops, tooltip: 'Maximum IOPS (Input/Output Operations Per Second) for your current compute size', isMaxValue: true, @@ -210,10 +219,13 @@ export const getReportAttributes = (org: Organization, project: Project): Report ] } -export const getReportAttributesV2: (org: Organization, project: Project) => ReportAttributes[] = ( - org, - project -) => { +export const getReportAttributesV2: ( + org: Organization, + project: Project, + diskConfig?: DiskAttributesData, + maxConnections?: MaxConnectionsData, + poolerConfig?: PgbouncerConfigData +) => ReportAttributes[] = (org, project, diskConfig, maxConnections, poolerConfig) => { const isFreePlan = org?.plan?.id === 'free' const computeSize = project?.infra_compute_size || 'medium' const isSpendCapEnabled = @@ -369,7 +381,7 @@ export const getReportAttributesV2: (org: Organization, project: Project) => Rep attribute: 'disk_iops_max', provider: 'reference-line', label: 'Max IOPS', - value: getIOPSLimits(computeSize), + value: diskConfig?.attributes?.iops, tooltip: 'Maximum IOPS (Input/Output Operations Per Second) for your current compute size', isMaxValue: true, @@ -463,7 +475,7 @@ export const getReportAttributesV2: (org: Organization, project: Project) => Rep attribute: 'max_db_connections', provider: 'reference-line', label: 'Max connections', - value: getConnectionLimits(computeSize).direct, + value: maxConnections?.maxConnections, tooltip: 'Max available connections for your current compute size', isMaxValue: true, }, @@ -495,7 +507,7 @@ export const getReportAttributesV2: (org: Organization, project: Project) => Rep attribute: 'pg_pooler_max_connections', provider: 'reference-line', label: 'Max pooler connections', - value: getConnectionLimits(computeSize).pooler, + value: poolerConfig?.max_client_conn, tooltip: 'Maximum allowed pooler connections for your current compute size', isMaxValue: true, }, @@ -581,8 +593,7 @@ export const getReportAttributesV2: (org: Organization, project: Project) => Rep isReferenceLine: true, strokeDasharray: '4 2', label: 'Spend cap enabled', - value: - (project?.volumeSizeGb || getRecommendedDbSize(computeSize)) 
* 1024 * 1024 * 1024, + value: diskConfig?.attributes?.size_gb! * 1024 * 1024 * 1024, className: '[&_line]:!stroke-yellow-800 [&_line]:!opacity-100', opacity: 1, } @@ -592,77 +603,9 @@ export const getReportAttributesV2: (org: Organization, project: Project) => Rep isReferenceLine: true, label: '90% - Disk resize threshold', className: '[&_line]:!stroke-yellow-800', - value: - (project?.volumeSizeGb || getRecommendedDbSize(computeSize)) * - 1024 * - 1024 * - 1024 * - 0.9, // reaching 90% of the disk size will trigger a disk resize https://supabase.com/docs/guides/platform/database-size + value: diskConfig?.attributes?.size_gb! * 1024 * 1024 * 1024 * 0.9, }), ], }, ] } - -// Helper function to get connection limits based on compute size -export const getConnectionLimits = (computeSize: string = 'medium') => { - const connectionLimits = { - nano: { direct: 60, pooler: 200 }, - micro: { direct: 60, pooler: 200 }, - small: { direct: 90, pooler: 400 }, - medium: { direct: 120, pooler: 600 }, - large: { direct: 160, pooler: 800 }, - xlarge: { direct: 240, pooler: 1000 }, - '2xlarge': { direct: 380, pooler: 1500 }, - '4xlarge': { direct: 480, pooler: 3000 }, - '8xlarge': { direct: 490, pooler: 6000 }, - '12xlarge': { direct: 500, pooler: 9000 }, - '16xlarge': { direct: 500, pooler: 12000 }, - } - - return ( - connectionLimits[computeSize?.toLowerCase() as keyof typeof connectionLimits] || - connectionLimits.medium - ) -} - -// Helper function to get IOPS limits based on compute size -export const getIOPSLimits = (computeSize: string = 'medium') => { - const iopsLimits = { - nano: 250, - micro: 500, - small: 1000, - medium: 2000, - large: 3600, - xl: 6000, - '2xl': 12000, - '4xl': 20000, - '8xl': 40000, - '12xl': 50000, - '16xl': 80000, - } - - return iopsLimits[computeSize?.toLowerCase() as keyof typeof iopsLimits] || iopsLimits.medium -} - -// Helper function to get recommended DB size based on compute size (in GB) -export const getRecommendedDbSize = (computeSize: string = 'medium') => { - const recommendedSizes = { - nano: 0.5, // 500 MB - micro: 10, - small: 50, - medium: 100, - large: 200, - xl: 500, - '2xl': 1024, // 1 TB - '4xl': 2048, // 2 TB - '8xl': 4096, // 4 TB - '12xl': 6144, // 6 TB - '16xl': 10240, // 10 TB - } - - return ( - recommendedSizes[computeSize?.toLowerCase() as keyof typeof recommendedSizes] || - recommendedSizes.medium - ) -} diff --git a/apps/studio/hooks/misc/useChanged.ts b/apps/studio/hooks/misc/useChanged.ts new file mode 100644 index 0000000000000..afeeb0f319874 --- /dev/null +++ b/apps/studio/hooks/misc/useChanged.ts @@ -0,0 +1,12 @@ +import { useEffect, useRef } from 'react' + +export function useChanged(value: T): boolean { + const prev = useRef() + const changed = prev.current !== value + + useEffect(() => { + prev.current = value + }) + + return changed +} diff --git a/apps/studio/hooks/ui/useCsvFileDrop.ts b/apps/studio/hooks/ui/useCsvFileDrop.ts new file mode 100644 index 0000000000000..da2e91d449d5d --- /dev/null +++ b/apps/studio/hooks/ui/useCsvFileDrop.ts @@ -0,0 +1,61 @@ +import { type DragEvent, useCallback, useState } from 'react' + +import { type ImportDataFileDroppedEvent } from 'common/telemetry-constants' +import { flagInvalidFileImport } from 'components/interfaces/TableGridEditor/SidePanelEditor/SpreadsheetImport/SpreadsheetImport.utils' + +interface UseCsvFileDropOptions { + enabled: boolean + onFileDropped: (file: File) => void + onTelemetryEvent?: (eventName: ImportDataFileDroppedEvent['action']) => void +} + +interface UseCsvFileDropReturn { 
+  isDraggedOver: boolean
+  onDragOver: (event: DragEvent) => void
+  onFileDrop: (event: DragEvent) => void
+}
+
+export function useCsvFileDrop({
+  enabled,
+  onFileDropped,
+  onTelemetryEvent,
+}: UseCsvFileDropOptions): UseCsvFileDropReturn {
+  const [isDraggedOver, setIsDraggedOver] = useState(false)
+
+  const onDragOver = useCallback(
+    (event: DragEvent) => {
+      if (!enabled) return
+
+      if (event.type === 'dragover' && !isDraggedOver) {
+        setIsDraggedOver(true)
+      } else if (event.type === 'dragleave' || event.type === 'drop') {
+        setIsDraggedOver(false)
+      }
+      event.stopPropagation()
+      event.preventDefault()
+    },
+    [enabled, isDraggedOver]
+  )
+
+  const onFileDrop = useCallback(
+    (event: DragEvent) => {
+      if (!enabled) return
+
+      onDragOver(event)
+
+      const [file] = event.dataTransfer.files
+      if (flagInvalidFileImport(file)) return
+
+      onFileDropped(file)
+
+      onTelemetryEvent?.('import_data_dropzone_file_added')
+    },
+    [enabled, onDragOver, onFileDropped, onTelemetryEvent]
+  )
+
+  return {
+    isDraggedOver,
+    onDragOver,
+    onFileDrop,
+  }
+}
diff --git a/apps/studio/pages/project/[ref]/logs/index.tsx b/apps/studio/pages/project/[ref]/logs/index.tsx
index 16e562702267c..2c04272a4c106 100644
--- a/apps/studio/pages/project/[ref]/logs/index.tsx
+++ b/apps/studio/pages/project/[ref]/logs/index.tsx
@@ -1,54 +1,25 @@
 import { useRouter } from 'next/router'
-import { useContext, useEffect } from 'react'
+import { useEffect } from 'react'
 
-import { FeatureFlagContext, LOCAL_STORAGE_KEYS, useParams } from 'common'
+import { useParams } from 'common'
 import { useUnifiedLogsPreview } from 'components/interfaces/App/FeaturePreview/FeaturePreviewContext'
 import { UnifiedLogs } from 'components/interfaces/UnifiedLogs/UnifiedLogs'
 import DefaultLayout from 'components/layouts/DefaultLayout'
-import LogsLayout from 'components/layouts/LogsLayout/LogsLayout'
 import ProjectLayout from 'components/layouts/ProjectLayout/ProjectLayout'
-import { useLocalStorageQuery } from 'hooks/misc/useLocalStorage'
-import { useSelectedOrganizationQuery } from 'hooks/misc/useSelectedOrganization'
-import { IS_PLATFORM } from 'lib/constants'
-import type { NextPageWithLayout } from 'types'
+import { NextPageWithLayout } from 'types'
 
 export const LogPage: NextPageWithLayout = () => {
   const router = useRouter()
   const { ref } = useParams()
-  const { hasLoaded } = useContext(FeatureFlagContext)
-  const { data: org } = useSelectedOrganizationQuery()
   const { isEnabled: isUnifiedLogsEnabled } = useUnifiedLogsPreview()
 
-  const [lastVisitedLogsPage] = useLocalStorageQuery(
-    LOCAL_STORAGE_KEYS.LAST_VISITED_LOGS_PAGE,
-    'explorer'
-  )
-
   useEffect(() => {
-    if (hasLoaded && !!org && !isUnifiedLogsEnabled) {
-      router.replace(`/project/${ref}/logs/${lastVisitedLogsPage}`)
-    }
-  }, [router, hasLoaded, org, lastVisitedLogsPage, ref, isUnifiedLogsEnabled])
-
-  // Handle redirects when unified logs preview flag changes
-  useEffect(() => {
-    // Only handle redirects if we're currently on a logs page
-    if (!router.asPath.includes('/logs') || (IS_PLATFORM && !hasLoaded)) return
-
-    if (IS_PLATFORM && isUnifiedLogsEnabled) {
-      // If unified logs preview is enabled and we're not already on the main logs page
-      if (router.asPath !== `/project/${ref}/logs` && router.asPath.includes('/logs/')) {
-        router.push(`/project/${ref}/logs`)
-      }
-    } else {
-      // If unified logs preview is disabled and admin flag is also off
-      // and we're on the main logs page, redirect to explorer
-      if (router.asPath === `/project/${ref}/logs`) {
-        router.push(`/project/${ref}/logs/explorer`)
-      }
+    if (!isUnifiedLogsEnabled && ref) {
+      router.replace(`/project/${ref}/logs/explorer`)
     }
-  }, [isUnifiedLogsEnabled, router, ref, hasLoaded])
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [isUnifiedLogsEnabled, ref])
 
   if (isUnifiedLogsEnabled) {
     return (
@@ -60,14 +31,7 @@ export const LogPage: NextPageWithLayout = () => {
     )
   }
 
-  return (
-    <DefaultLayout>
-      <LogsLayout>
-        {/* Empty placeholder - the useEffect will handle redirect */}
-      </LogsLayout>
-    </DefaultLayout>
-  )
+  return null
 }
 
 // Don't use getLayout since we're handling layouts conditionally within the component
diff --git a/apps/studio/pages/project/[ref]/reports/[id].tsx b/apps/studio/pages/project/[ref]/reports/[id].tsx
index 9f74349d49937..b149431d22272 100644
--- a/apps/studio/pages/project/[ref]/reports/[id].tsx
+++ b/apps/studio/pages/project/[ref]/reports/[id].tsx
@@ -5,7 +5,7 @@ import ReportsLayout from 'components/layouts/ReportsLayout/ReportsLayout'
 import type { NextPageWithLayout } from 'types'
 
 const PageLayout: NextPageWithLayout = () => (
-
+
diff --git a/apps/studio/pages/project/[ref]/reports/database.tsx b/apps/studio/pages/project/[ref]/reports/database.tsx
index dab6b263b61b7..6a3fd20c33435 100644
--- a/apps/studio/pages/project/[ref]/reports/database.tsx
+++ b/apps/studio/pages/project/[ref]/reports/database.tsx
@@ -27,7 +27,10 @@ import GrafanaPromoBanner from 'components/ui/GrafanaPromoBanner'
 import Panel from 'components/ui/Panel'
 import { analyticsKeys } from 'data/analytics/keys'
 import { useProjectDiskResizeMutation } from 'data/config/project-disk-resize-mutation'
+import { useDiskAttributesQuery } from 'data/config/disk-attributes-query'
 import { useDatabaseSizeQuery } from 'data/database/database-size-query'
+import { useMaxConnectionsQuery } from 'data/database/max-connections-query'
+import { usePgbouncerConfigQuery } from 'data/database/pgbouncer-config-query'
 import { getReportAttributes, getReportAttributesV2 } from 'data/reports/database-charts'
 import { useDatabaseReport } from 'data/reports/database-report-query'
 import { useAsyncCheckProjectPermissions } from 'hooks/misc/useCheckPermissions'
@@ -97,6 +100,13 @@ const DatabaseUsage = () => {
   const databaseSizeBytes = databaseSizeData ?? 0
   const currentDiskSize = project?.volumeSizeGb ?? 0
 
+  const { data: diskConfig } = useDiskAttributesQuery({ projectRef: project?.ref })
+  const { data: maxConnections } = useMaxConnectionsQuery({
+    projectRef: project?.ref,
+    connectionString: project?.connectionString,
+  })
+  const { data: poolerConfig } = usePgbouncerConfigQuery({ projectRef: project?.ref })
+
   const { can: canUpdateDiskSizeConfig } = useAsyncCheckProjectPermissions(
     PermissionAction.UPDATE,
     'projects',
@@ -107,8 +117,20 @@ const DatabaseUsage = () => {
     }
   )
 
-  const REPORT_ATTRIBUTES = getReportAttributes(org!, project!)
-  const REPORT_ATTRIBUTES_V2 = getReportAttributesV2(org!, project!)
+  const REPORT_ATTRIBUTES = getReportAttributes(
+    org!,
+    project!,
+    diskConfig,
+    maxConnections,
+    poolerConfig
+  )
+  const REPORT_ATTRIBUTES_V2 = getReportAttributesV2(
+    org!,
+    project!,
+    diskConfig,
+    maxConnections,
+    poolerConfig
+  )
 
   const { isLoading: isUpdatingDiskSize } = useProjectDiskResizeMutation({
     onSuccess: (_, variables) => {
diff --git a/apps/studio/state/table-editor.tsx b/apps/studio/state/table-editor.tsx
index be940f4135e8f..ed325bf861b10 100644
--- a/apps/studio/state/table-editor.tsx
+++ b/apps/studio/state/table-editor.tsx
@@ -26,7 +26,7 @@ export type SidePanel =
       type: 'foreign-row-selector'
       foreignKey: ForeignKeyState
     }
-  | { type: 'csv-import' }
+  | { type: 'csv-import'; file?: File }
 
 export type ConfirmationDialog =
   | { type: 'table'; isDeleteWithCascade: boolean }
@@ -181,10 +181,10 @@ export const createTableEditorState = () => {
         sidePanel: { type: 'foreign-row-selector', foreignKey },
       }
     },
-    onImportData: () => {
+    onImportData: (file?: File) => {
       state.ui = {
         open: 'side-panel',
-        sidePanel: { type: 'csv-import' },
+        sidePanel: { type: 'csv-import', file },
      }
    },
diff --git a/packages/common/constants/local-storage.ts b/packages/common/constants/local-storage.ts
index 449ea259bb9fc..db05c8dd204b5 100644
--- a/packages/common/constants/local-storage.ts
+++ b/packages/common/constants/local-storage.ts
@@ -62,8 +62,6 @@ export const LOCAL_STORAGE_KEYS = {
   // api keys view switcher for new and legacy api keys
   API_KEYS_VIEW: (ref: string) => `supabase-api-keys-view-${ref}`,
 
-  // last visited logs page
-  LAST_VISITED_LOGS_PAGE: 'supabase-last-visited-logs-page',
   LAST_VISITED_ORGANIZATION: 'last-visited-organization',
 
   // user impersonation selector previous searches
diff --git a/packages/common/telemetry-constants.ts b/packages/common/telemetry-constants.ts
index 674165090ee20..76f433da16e60 100644
--- a/packages/common/telemetry-constants.ts
+++ b/packages/common/telemetry-constants.ts
@@ -906,6 +906,18 @@ export interface ImportDataButtonClickedEvent {
   groups: TelemetryGroups
 }
 
+/**
+ * User dropped a file into the import data dropzone on an empty table.
+ *
+ * @group Events
+ * @source studio
+ * @page /dashboard/project/{ref}/editor
+ */
+export interface ImportDataFileDroppedEvent {
+  action: 'import_data_dropzone_file_added'
+  groups: TelemetryGroups
+}
+
 /**
  * User added data from the import data via CSV/spreadsheet successfully.
  *
@@ -1511,6 +1523,7 @@ export type TelemetryEvent =
   | HelpButtonClickedEvent
   | ExampleProjectCardClickedEvent
   | ImportDataButtonClickedEvent
+  | ImportDataFileDroppedEvent
   | ImportDataAddedEvent
   | SendFeedbackButtonClickedEvent
   | SqlEditorQueryRunButtonClickedEvent
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fd0c6fd21572e..383f72de40be8 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -624,6 +624,9 @@ importers:
       '@types/unist':
         specifier: ^2.0.6
        version: 2.0.8
+      '@types/uuid':
+        specifier: ^10.0.0
+        version: 10.0.0
       api-types:
         specifier: workspace:*
         version: link:../../packages/api-types
@@ -10921,7 +10924,6 @@ packages:
     resolution: {integrity: sha512-t0q23FIpvHDTtnORW+bDJziGsal5uh9RJTJ1fyH8drd4lICOoXhJ5pLMUZ5C0VQei6dNmwTzzoTRgMkO9JgHEQ==}
     peerDependencies:
       eslint: '>= 5'
-    bundledDependencies: []
 
   eslint-plugin-import@2.31.0:
     resolution: {integrity: sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==}
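
A note on the new `useChanged` hook: it returns `true` on the render where `value` differs from the previous render's value (on the first render it compares against `undefined`), which makes it handy for one-shot reactions to a changed prop. A minimal usage sketch; the component and its prop are hypothetical, not part of this diff:

```tsx
import { useEffect } from 'react'
import { useChanged } from 'hooks/misc/useChanged'

// Hypothetical component: reacts once whenever the search query changes.
function SearchResults({ query }: { query: string }) {
  // true only on renders where `query` !== the previous render's `query`
  const queryChanged = useChanged(query)

  useEffect(() => {
    if (queryChanged) {
      // one-shot side effect, e.g. resetting pagination
      console.log('query changed to:', query)
    }
  })

  return <p>Results for {query}</p>
}
```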
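The drop hook is built to feed the extended `onImportData(file?: File)` action above: the dropped file travels from `useCsvFileDrop` into the `csv-import` side panel state. A sketch of the wiring, assuming a hypothetical dropzone component (the actual call site in the table grid editor is not part of this diff):

```tsx
import { useCsvFileDrop } from 'hooks/ui/useCsvFileDrop'

// Hypothetical dropzone for an empty table. `onImportData` would come from the
// table editor state, whose `csv-import` side panel now accepts a file.
function EmptyTableDropzone({ onImportData }: { onImportData: (file?: File) => void }) {
  const { isDraggedOver, onDragOver, onFileDrop } = useCsvFileDrop({
    enabled: true,
    // opens the csv-import side panel with the dropped file pre-selected
    onFileDropped: (file) => onImportData(file),
    onTelemetryEvent: (action) => console.log('telemetry:', action),
  })

  return (
    <div
      onDragOver={onDragOver}
      onDragLeave={onDragOver} // the hook branches on event.type internally
      onDrop={onFileDrop}
      style={{ opacity: isDraggedOver ? 0.5 : 1 }}
    >
      Drop a CSV file here to import it
    </div>
  )
}
```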
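On the database-charts change: both reference lines now derive from the disk attributes endpoint (`diskConfig?.attributes?.size_gb`) rather than `project.volumeSizeGb` or a compute-based fallback, converting gigabytes to bytes and, for the resize line, taking 90% of the disk size. A quick sketch of the arithmetic (the helper name is illustrative, not from the diff):

```ts
const GB = 1024 * 1024 * 1024

// Illustrative helper mirroring the reference-line math in database-charts.ts.
// Reaching 90% of the provisioned disk triggers an automatic resize:
// https://supabase.com/docs/guides/platform/database-size
function diskResizeThresholdBytes(sizeGb: number): number {
  return sizeGb * GB * 0.9
}

console.log(diskResizeThresholdBytes(8)) // 7730941132.8, i.e. 90% of an 8 GB disk
```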