diff --git a/apps/docs/components/Navigation/NavigationMenu/NavigationMenu.constants.ts b/apps/docs/components/Navigation/NavigationMenu/NavigationMenu.constants.ts index 2d3df44c8cfc2..13db7c5e8ca66 100644 --- a/apps/docs/components/Navigation/NavigationMenu/NavigationMenu.constants.ts +++ b/apps/docs/components/Navigation/NavigationMenu/NavigationMenu.constants.ts @@ -1717,6 +1717,25 @@ export const storage: NavMenuConstant = { { name: 'API Compatibility', url: '/guides/storage/s3/compatibility' }, ], }, + { + name: 'Analytics Buckets', + url: undefined, + items: [ + { name: 'Introduction', url: '/guides/storage/analytics/introduction' }, + { + name: 'Creating Analytics Buckets', + url: '/guides/storage/analytics/creating-analytics-buckets', + }, + { + name: 'Connecting to Analytics Buckets', + url: '/guides/storage/analytics/connecting-to-analytics-bucket', + }, + { + name: 'Limits', + url: '/guides/storage/analytics/limits', + }, + ], + }, { name: 'CDN', url: undefined, diff --git a/apps/docs/content/guides/local-development/declarative-database-schemas.mdx b/apps/docs/content/guides/local-development/declarative-database-schemas.mdx index bc71dc34edf99..bcbacbe281a78 100644 --- a/apps/docs/content/guides/local-development/declarative-database-schemas.mdx +++ b/apps/docs/content/guides/local-development/declarative-database-schemas.mdx @@ -42,12 +42,6 @@ create table "employees" ( - - -Make sure your local database is stopped before diffing your schema. - - - diff --git a/apps/docs/content/guides/storage/analytics/connecting-to-analytics-bucket.mdx b/apps/docs/content/guides/storage/analytics/connecting-to-analytics-bucket.mdx new file mode 100644 index 0000000000000..a3d193e0ecfac --- /dev/null +++ b/apps/docs/content/guides/storage/analytics/connecting-to-analytics-bucket.mdx @@ -0,0 +1,187 @@ +--- +title: 'Connecting to Analytics Buckets' +--- + + + +This feature is in **Private Alpha**. 
API stability and backward compatibility are not guaranteed at this stage. Reach out from this [Form](https://forms.supabase.com/analytics-buckets) to request access + + + +When interacting with Analytics Buckets, you authenticate against two main services - the Iceberg REST Catalog and the S3-Compatible Storage Endpoint. + +The **Iceberg REST Catalog** acts as the central management system for Iceberg tables. It allows Iceberg clients, such as PyIceberg and Apache Spark, to perform metadata operations including: + +- Creating and managing tables and namespaces +- Tracking schemas and handling schema evolution +- Managing partitions and snapshots +- Ensuring transactional consistency and isolation + +The REST Catalog itself does not store the actual data. Instead, it stores metadata describing the structure, schema, and partitioning strategy of Iceberg tables. + +Actual data storage and retrieval operations occur through the separate S3-compatible endpoint, optimized for reading and writing large analytical datasets stored in Parquet files. + +## Authentication + +To connect to an Analytics Bucket, you will need + +- An Iceberg client (Spark, PyIceberg, etc) which supports the REST Catalog interface. +- S3 credentials to authenticate your Iceberg client with the underlying S3 Bucket. + To create S3 Credentials go to [**Project Settings > Storage**](https://supabase.com/dashboard/project/_/settings/storage), for more information, see the [S3 Authentication Guide](https://supabase.com/docs/guides/storage/s3/authentication). We will support other authentication methods in the future. + +- The project reference and Service key for your Supabase project. + You can find your Service key in the Supabase Dashboard under [**Project Settings > API**.](https://supabase.com/dashboard/project/_/settings/api-keys) + +You will now have an **Access Key** and a **Secret Key** that you can use to authenticate your Iceberg client. 
+ +## Connecting via PyIceberg + +PyIceberg is a Python client for Apache Iceberg, facilitating interaction with Iceberg Buckets. + +**Installation** + +```bash +pip install pyiceberg pyarrow +``` + +Here's a comprehensive example using PyIceberg with clearly separated configuration: + +```python +from pyiceberg.catalog import load_catalog +import pyarrow as pa +import datetime + +# Supabase project ref +PROJECT_REF = "" + +# Configuration for Iceberg REST Catalog +WAREHOUSE = "your-analytics-bucket-name" +TOKEN = "SERVICE_KEY" + +# Configuration for S3-Compatible Storage +S3_ACCESS_KEY = "KEY" +S3_SECRET_KEY = "SECRET" +S3_REGION = "PROJECT_REGION" + +S3_ENDPOINT = f"https://{PROJECT_REF}.supabase.co/storage/v1/s3" +CATALOG_URI = f"https://{PROJECT_REF}.supabase.co/storage/v1/iceberg" + +# Load the Iceberg catalog +catalog = load_catalog( + "analytics-bucket", + type="rest", + warehouse=WAREHOUSE, + uri=CATALOG_URI, + token=TOKEN, + **{ + "py-io-impl": "pyiceberg.io.pyarrow.PyArrowFileIO", + "s3.endpoint": S3_ENDPOINT, + "s3.access-key-id": S3_ACCESS_KEY, + "s3.secret-access-key": S3_SECRET_KEY, + "s3.region": S3_REGION, + "s3.force-virtual-addressing": False, + }, +) + +# Create namespace if it doesn't exist +catalog.create_namespace_if_not_exists("default") + +# Define schema for your Iceberg table +schema = pa.schema([ + pa.field("event_id", pa.int64()), + pa.field("event_name", pa.string()), + pa.field("event_timestamp", pa.timestamp("ms")), +]) + +# Create table (if it doesn't exist already) +table = catalog.create_table_if_not_exists(("default", "events"), schema=schema) + +# Generate and insert sample data +current_time = datetime.datetime.now() +data = pa.table({ + "event_id": [1, 2, 3], + "event_name": ["login", "logout", "purchase"], + "event_timestamp": [current_time, current_time, current_time], +}) + +# Append data to the Iceberg table +table.append(data) + +# Scan table and print data as pandas DataFrame +df = table.scan().to_pandas() +print(df) +``` 
+ +## Connecting via Apache Spark + +Apache Spark allows distributed analytical queries against Iceberg Buckets. + +```python +from pyspark.sql import SparkSession + +# Supabase project ref +PROJECT_REF = "" + +# Configuration for Iceberg REST Catalog +WAREHOUSE = "your-analytics-bucket-name" +TOKEN = "SERVICE_KEY" + +# Configuration for S3-Compatible Storage +S3_ACCESS_KEY = "KEY" +S3_SECRET_KEY = "SECRET" +S3_REGION = "PROJECT_REGION" + +S3_ENDPOINT = f"https://{PROJECT_REF}.supabase.co/storage/v1/s3" +CATALOG_URI = f"https://{PROJECT_REF}.supabase.co/storage/v1/iceberg" + +# Initialize Spark session with Iceberg configuration +spark = SparkSession.builder \ + .master("local[*]") \ + .appName("SupabaseIceberg") \ + .config("spark.driver.host", "127.0.0.1") \ + .config("spark.driver.bindAddress", "127.0.0.1") \ + .config('spark.jars.packages', 'org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.6.1,org.apache.iceberg:iceberg-aws-bundle:1.6.1') \ + .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") \ + .config("spark.sql.catalog.my_catalog", "org.apache.iceberg.spark.SparkCatalog") \ + .config("spark.sql.catalog.my_catalog.type", "rest") \ + .config("spark.sql.catalog.my_catalog.uri", CATALOG_URI) \ + .config("spark.sql.catalog.my_catalog.warehouse", WAREHOUSE) \ + .config("spark.sql.catalog.my_catalog.token", TOKEN) \ + .config("spark.sql.catalog.my_catalog.s3.endpoint", S3_ENDPOINT) \ + .config("spark.sql.catalog.my_catalog.s3.path-style-access", "true") \ + .config("spark.sql.catalog.my_catalog.s3.access-key-id", S3_ACCESS_KEY) \ + .config("spark.sql.catalog.my_catalog.s3.secret-access-key", S3_SECRET_KEY) \ + .config("spark.sql.catalog.my_catalog.s3.remote-signing-enabled", "false") \ + .config("spark.sql.defaultCatalog", "my_catalog") \ + .getOrCreate() + +# SQL Operations +spark.sql("CREATE NAMESPACE IF NOT EXISTS analytics") + +spark.sql(""" + CREATE TABLE IF NOT EXISTS analytics.users ( + user_id BIGINT, + 
username STRING + ) + USING iceberg +""") + +spark.sql(""" + INSERT INTO analytics.users (user_id, username) + VALUES (1, 'Alice'), (2, 'Bob'), (3, 'Charlie') +""") + +result_df = spark.sql("SELECT * FROM analytics.users") +result_df.show() +``` + +## Connecting to the Iceberg REST Catalog directly + +To authenticate with the Iceberg REST Catalog directly, you need to provide a valid Supabase **Service key** as a Bearer token. + +``` +curl \ + --request GET -sL \ + --url 'https://.supabase.co/storage/v1/iceberg/v1/config?warehouse=' \ + --header 'Authorization: Bearer ' +``` diff --git a/apps/docs/content/guides/storage/analytics/creating-analytics-buckets.mdx b/apps/docs/content/guides/storage/analytics/creating-analytics-buckets.mdx new file mode 100644 index 0000000000000..dd0093161d500 --- /dev/null +++ b/apps/docs/content/guides/storage/analytics/creating-analytics-buckets.mdx @@ -0,0 +1,38 @@ +--- +title: 'Creating Analytics Buckets' +subtitle: '' +--- + + + +This feature is in **Private Alpha**. API stability and backward compatibility are not guaranteed at this stage. Reach out from this [Form](https://forms.supabase.com/analytics-buckets) to request access + + + +Analytics Buckets use [Apache Iceberg](https://iceberg.apache.org/), an open-table format for managing large analytical datasets. +You can interact with them using tools such as [PyIceberg](https://py.iceberg.apache.org/), [Apache Spark](https://spark.apache.org/) or any client which supports the [standard Iceberg REST Catalog API](https://editor-next.swagger.io/?url=https://raw.githubusercontent.com/apache/iceberg/main/open-api/rest-catalog-open-api.yaml). + +You can create an Analytics Bucket using either the Supabase SDK or the Supabase Dashboard. 
+ +### Using the Supabase SDK + +```ts +import { createClient } from '@supabase/supabase-js' + +const supabase = createClient('https://your-project.supabase.co', 'your-service-key') + +supabase.storage.createBucket('my-analytics-bucket', { + type: 'ANALYTICS', +}) +``` + +### Using the Supabase Dashboard + +1. Navigate to the Storage section in the Supabase Dashboard. +2. Click on "Create Bucket". +3. Enter a name for your bucket (e.g., my-analytics-bucket). +4. Select "Analytics Bucket" as the bucket type. + +Storage schema design + +Now, that you have created your Analytics Bucket, you can start [connecting to it](/docs/guides/storage/analytics/connecting-to-analytics-bucket) with Iceberg clients like PyIceberg or Apache Spark. diff --git a/apps/docs/content/guides/storage/analytics/introduction.mdx b/apps/docs/content/guides/storage/analytics/introduction.mdx new file mode 100644 index 0000000000000..0f4249efe972b --- /dev/null +++ b/apps/docs/content/guides/storage/analytics/introduction.mdx @@ -0,0 +1,24 @@ +--- +title: 'Analytics Buckets' +subtitle: '' +--- + + + +This feature is in **Private Alpha**. API stability and backward compatibility are not guaranteed at this stage. Reach out from this [Form](https://forms.supabase.com/analytics-buckets) to request access + + + +**Analytics Buckets** are designed for analytical workflows on large datasets without impacting your main database. + +Postgres tables are optimized for handling real-time, transactional workloads with frequent inserts, updates, deletes and low-latency queries. **Analytical workloads** have very different requirements: processing large volumes of historical data, running complex queries and aggregations, minimizing storage costs, and ensuring these analytical queries do not interfere with the production traffic. + +**Analytics Buckets** address these requirements using [Apache Iceberg](https://iceberg.apache.org/), an open-table format for managing large analytical datasets efficiently. 
+ +Analytics Buckets are ideal for +• Data warehousing and business intelligence +• Historical data archiving +• Periodically refreshed real-time analytics +• Complex analytical queries over large datasets + +By separating transactional and analytical workloads, Supabase makes it easy to build scalable analytics pipelines without impacting your primary Postgres performance. diff --git a/apps/docs/content/guides/storage/analytics/limits.mdx b/apps/docs/content/guides/storage/analytics/limits.mdx new file mode 100644 index 0000000000000..0eac0b0f57ca5 --- /dev/null +++ b/apps/docs/content/guides/storage/analytics/limits.mdx @@ -0,0 +1,23 @@ +--- +title: 'Analytics Buckets Limits' +subtitle: '' +--- + + + +This feature is in **Private Alpha**. API stability and backward compatibility are not guaranteed at this stage. Reach out from this [Form](https://forms.supabase.com/analytics-buckets) to request access + + + +The following default limits are applied when this feature is in the private alpha stage, they can be adjusted on a case-by-case basis: + +| **Category** | **Limit** | +| --------------------------------------- | --------- | +| Number of Analytics Buckets per project | 2 | +| Number of namespaces per bucket | 10 | +| Number of tables per namespace | 10 | + +## Pricing + +Analytics Buckets are Free to use during the Private Alpha phase, +however, you'll still be charged for the underlying egress. 
diff --git a/apps/docs/docs/ref/kotlin/installing.mdx b/apps/docs/docs/ref/kotlin/installing.mdx index 93a918c49ffc1..5c4a05158b273 100644 --- a/apps/docs/docs/ref/kotlin/installing.mdx +++ b/apps/docs/docs/ref/kotlin/installing.mdx @@ -151,25 +151,25 @@ custom_edit_url: https://github.com/supabase/supabase/edit/master/web/spec/supab ```kotlin - val commonMain by getting { + commonMain { dependencies { - //supabase modules + //Supabase modules } } - val jvmMain by getting { + jvmMain { dependencies { implementation("io.ktor:ktor-client-cio:KTOR_VERSION") } } - val androidMain by getting { - dependsOn(jvmMain) + androidMain { + dependsOn(jvmMain.get()) } - val jsMain by getting { + jsMain { dependencies { implementation("io.ktor:ktor-client-js:KTOR_VERSION") } } - val iosMain by getting { + iosMain { dependencies { implementation("io.ktor:ktor-client-darwin:KTOR_VERSION") } diff --git a/apps/docs/public/img/storage/iceberg-bucket.png b/apps/docs/public/img/storage/iceberg-bucket.png new file mode 100644 index 0000000000000..57c557b242618 Binary files /dev/null and b/apps/docs/public/img/storage/iceberg-bucket.png differ diff --git a/apps/docs/spec/supabase_kt_v3.yml b/apps/docs/spec/supabase_kt_v3.yml index 3aa366b00f3df..f6a4988ea7216 100644 --- a/apps/docs/spec/supabase_kt_v3.yml +++ b/apps/docs/spec/supabase_kt_v3.yml @@ -3720,9 +3720,11 @@ functions: $ref: '@supabase/gotrue-js.GoTrueClient.onAuthStateChange' notes: | Listen to session changes. + - `sessionStatus` is a `Flow` that emits the current session status. Everything related to the session is handled by this flow. + - `events` is a `Flow` that emits auth events. This flow is used to listen to auth events that are independent of the current session, like OTP errors, refresh failures, etc. Experimental. 
examples: - id: listen-to-auth-changes - name: Listen to auth changes + name: Listen to session status changes isSpotlight: true code: | ```kotlin @@ -3742,7 +3744,9 @@ functions: } } SessionStatus.Initializing -> println("Initializing") - is SessionStatus.RefreshFailure -> println("Refresh failure ${it.cause}") //Either a network error or a internal server error + is SessionStatus.RefreshFailure -> { + println("Session expired and could not be refreshed") + } is SessionStatus.NotAuthenticated -> { if(it.isSignOut) { println("User signed out") @@ -3759,6 +3763,20 @@ functions: - `Initializing`, - `RefreshFailure(cause)`, - `Authenticated(session, source)` + - id: listen-to-events + name: Listen to auth events + isSpotlight: false + code: | + ```kotlin + supabase.auth.events.collect { + when(it) { + is AuthEvent.OtpError -> + println("Found error in current URL / deeplink during the OAuth flow: ${it.error}") + is AuthEvent.RefreshFailure -> + println("Failed to refresh session: ${it.error}") + } + } + ``` - id: reset-password-for-email title: 'Send a password reset request' notes: | @@ -4053,7 +4071,7 @@ functions: supabaseKey = "supabaseKey" ) { install(Auth) { - minimalSettings() //disables session saving and auto-refreshing + minimalConfig() //disables session saving and auto-refreshing } // install other plugins (these will use the service role key) } diff --git a/apps/studio/components/grid/components/grid/Grid.tsx b/apps/studio/components/grid/components/grid/Grid.tsx index 116cbf7045519..fe907cabf8af0 100644 --- a/apps/studio/components/grid/components/grid/Grid.tsx +++ b/apps/studio/components/grid/components/grid/Grid.tsx @@ -6,12 +6,13 @@ import { formatForeignKeys } from 'components/interfaces/TableGridEditor/SidePan import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' import AlertError from 'components/ui/AlertError' import { useForeignKeyConstraintsQuery } from 'data/database/foreign-key-constraints-query' +import { 
ENTITY_TYPE } from 'data/entity-types/entity-type-constants' import { useSendEventMutation } from 'data/telemetry/send-event-mutation' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { useTableEditorStateSnapshot } from 'state/table-editor' import { useTableEditorTableStateSnapshot } from 'state/table-editor-table' import { Button, cn } from 'ui' -import { GenericSkeletonLoader } from 'ui-patterns' +import { GenericSkeletonLoader } from 'ui-patterns/ShimmeringLoader' import type { Filter, GridProps, SupaRow } from '../../types' import { useOnRowsChange } from './Grid.utils' import RowRenderer from './RowRenderer' @@ -67,6 +68,7 @@ export const Grid = memo( } const table = snap.table + const tableEntityType = snap.originalTable?.entity_type const { mutate: sendEvent } = useSendEventMutation() const org = useSelectedOrganization() @@ -82,13 +84,13 @@ export const Grid = memo( table?.columns.find((x) => x.name == columnName)?.foreignKey ?? {} const fk = data?.find( - (key: any) => + (key) => key.source_schema === table?.schema && key.source_table === table?.name && key.source_columns.includes(columnName) && key.target_schema === targetTableSchema && key.target_table === targetTableName && - key.target_columns.includes(targetColumnName) + key.target_columns.includes(targetColumnName ?? '') ) return fk !== undefined ? formatForeignKeys([fk])[0] : undefined @@ -141,11 +143,15 @@ export const Grid = memo( {(filters ?? []).length === 0 ? (

This table is empty

-

- Add rows to your table to get started. -

-
- { + {tableEntityType === ENTITY_TYPE.FOREIGN_TABLE ? ( +
+

+ This table is a foreign table. Add data to the connected source to get + started. +

+
+ ) : ( +
- } -
+
+ )}
) : (
diff --git a/apps/studio/components/interfaces/App/FeaturePreview/Branching2Preview.tsx b/apps/studio/components/interfaces/App/FeaturePreview/Branching2Preview.tsx new file mode 100644 index 0000000000000..88911c2d4b027 --- /dev/null +++ b/apps/studio/components/interfaces/App/FeaturePreview/Branching2Preview.tsx @@ -0,0 +1,64 @@ +import Image from 'next/image' + +import { InlineLink } from 'components/ui/InlineLink' +import { BASE_PATH } from 'lib/constants' + +export const Branching2Preview = () => { + return ( +
+ api-docs-side-panel-preview +

+ Branching 2.0 introduces a new workflow for managing database branches without having to use + Git. Create branches, review changes and merge back into production all through the + dashboard. Read the below limitations and our{' '} + + branching documentation + {' '} + before opting in. +

+
+

Limitations:

+
    +
  • Custom roles created through the dashboard are not captured on branch creation.
  • +
  • + Only public schema changes are supported right now. +
  • +
  • Extensions are not included in the diff process
  • +
  • + Branches can only be merged to main; merging between preview branches is + not supported. +
  • +
  • + If your branch is out of date, you can pull in latest changes from main, + but keep in mind that all functions will be overwritten. +
  • +
  • + Deleting functions must be done manually on main. +
  • +
  • Migration conflicts must be manually resolved on the preview branch.
  • +
  • + If you have run migrations on main, new branches will be created from + existing migrations instead of a full schema dump. +
  • +
+
+ +
+

Enabling this preview will:

+
    +
  • Enable the new Branching 2.0 workflow for your project.
  • +
  • + Allow you to create, manage, and merge database branches with improved UI and features. +
  • +
  • Access new merge request and deployment management tools.
  • +
+
+
+ ) +} diff --git a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreview.constants.tsx b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreview.constants.tsx index 2db32d6c747a5..5c64c5747075b 100644 --- a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreview.constants.tsx +++ b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreview.constants.tsx @@ -1,6 +1,13 @@ import { LOCAL_STORAGE_KEYS } from 'common' export const FEATURE_PREVIEWS = [ + { + key: LOCAL_STORAGE_KEYS.UI_PREVIEW_BRANCHING_2_0, + name: 'Branching 2.0', + discussionsUrl: 'https://github.com/orgs/supabase/discussions/branching-2-0', + isNew: true, + isPlatformOnly: true, + }, { key: LOCAL_STORAGE_KEYS.UI_PREVIEW_REALTIME_SETTINGS, name: 'Realtime settings', diff --git a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewContext.tsx b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewContext.tsx index 01ed929dc8639..c71a9beda01f0 100644 --- a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewContext.tsx +++ b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewContext.tsx @@ -84,3 +84,8 @@ export const useIsRealtimeSettingsEnabled = () => { const { flags } = useFeaturePreviewContext() return flags[LOCAL_STORAGE_KEYS.UI_PREVIEW_REALTIME_SETTINGS] } + +export const useIsBranching2Enabled = () => { + const { flags } = useFeaturePreviewContext() + return flags[LOCAL_STORAGE_KEYS.UI_PREVIEW_BRANCHING_2_0] +} diff --git a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewModal.tsx b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewModal.tsx index 477392fa4f68a..906853d35f1e6 100644 --- a/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewModal.tsx +++ b/apps/studio/components/interfaces/App/FeaturePreview/FeaturePreviewModal.tsx @@ -5,7 +5,7 @@ import { ReactNode } from 'react' import { LOCAL_STORAGE_KEYS, useParams } from 'common' import 
{ useSendEventMutation } from 'data/telemetry/send-event-mutation' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' -import { useIsRealtimeSettingsFFEnabled } from 'hooks/ui/useFlag' +import { useIsRealtimeSettingsFFEnabled, useFlag } from 'hooks/ui/useFlag' import { IS_PLATFORM } from 'lib/constants' import { useAppStateSnapshot } from 'state/app-state' import { Badge, Button, Modal, ScrollArea, cn } from 'ui' @@ -15,10 +15,12 @@ import { FEATURE_PREVIEWS } from './FeaturePreview.constants' import { useFeaturePreviewContext } from './FeaturePreviewContext' import { InlineEditorPreview } from './InlineEditorPreview' import { RealtimeSettingsPreview } from './RealtimeSettingsPreview' +import { Branching2Preview } from './Branching2Preview' const FEATURE_PREVIEW_KEY_TO_CONTENT: { [key: string]: ReactNode } = { + [LOCAL_STORAGE_KEYS.UI_PREVIEW_BRANCHING_2_0]: , [LOCAL_STORAGE_KEYS.UI_PREVIEW_REALTIME_SETTINGS]: , [LOCAL_STORAGE_KEYS.UI_PREVIEW_INLINE_EDITOR]: , [LOCAL_STORAGE_KEYS.UI_PREVIEW_API_SIDE_PANEL]: , @@ -31,14 +33,16 @@ const FeaturePreviewModal = () => { const org = useSelectedOrganization() const featurePreviewContext = useFeaturePreviewContext() const { mutate: sendEvent } = useSendEventMutation() - const isRealtimeSettingsEnabled = useIsRealtimeSettingsFFEnabled() + const gitlessBranchingEnabled = useFlag('gitlessBranching') // [Joshen] Use this if we want to feature flag previews function isReleasedToPublic(feature: (typeof FEATURE_PREVIEWS)[number]) { switch (feature.key) { case 'supabase-ui-realtime-settings': return isRealtimeSettingsEnabled + case 'supabase-ui-branching-2-0': + return gitlessBranchingEnabled default: return true } diff --git a/apps/studio/components/interfaces/BranchManagement/BranchSelector.tsx b/apps/studio/components/interfaces/BranchManagement/BranchSelector.tsx index 51c2c5528e13f..9714d8e5e0680 100644 --- a/apps/studio/components/interfaces/BranchManagement/BranchSelector.tsx +++ 
b/apps/studio/components/interfaces/BranchManagement/BranchSelector.tsx @@ -56,9 +56,11 @@ export const BranchSelector = ({ content: { side: 'bottom', text: - availableBranches.length === 0 - ? 'All branches currently have merge requests' - : undefined, + branches.length === 0 + ? 'Create a branch first to start a merge request' + : availableBranches.length === 0 + ? 'All branches currently have merge requests' + : undefined, }, }} > diff --git a/apps/studio/components/interfaces/BranchManagement/CreateBranchModal.tsx b/apps/studio/components/interfaces/BranchManagement/CreateBranchModal.tsx index fca9784bed051..7f3074d126973 100644 --- a/apps/studio/components/interfaces/BranchManagement/CreateBranchModal.tsx +++ b/apps/studio/components/interfaces/BranchManagement/CreateBranchModal.tsx @@ -10,8 +10,10 @@ import { toast } from 'sonner' import * as z from 'zod' import { useParams } from 'common' +import { useIsBranching2Enabled } from 'components/interfaces/App/FeaturePreview/FeaturePreviewContext' import { BranchingPITRNotice } from 'components/layouts/AppLayout/EnableBranchingButton/BranchingPITRNotice' import AlertError from 'components/ui/AlertError' +import { ButtonTooltip } from 'components/ui/ButtonTooltip' import { GenericSkeletonLoader } from 'components/ui/ShimmeringLoader' import UpgradeToPro from 'components/ui/UpgradeToPro' import { useBranchCreateMutation } from 'data/branches/branch-create-mutation' @@ -22,7 +24,6 @@ import { projectKeys } from 'data/projects/keys' import { useProjectAddonsQuery } from 'data/subscriptions/project-addons-query' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { useSelectedProject } from 'hooks/misc/useSelectedProject' -import { useFlag } from 'hooks/ui/useFlag' import { BASE_PATH, IS_PLATFORM } from 'lib/constants' import { useAppStateSnapshot } from 'state/app-state' import { @@ -50,7 +51,7 @@ export const CreateBranchModal = () => { const queryClient = useQueryClient() const 
projectDetails = useSelectedProject() const selectedOrg = useSelectedOrganization() - const gitlessBranching = useFlag('gitlessBranching') + const gitlessBranching = useIsBranching2Enabled() const { showCreateBranchModal, setShowCreateBranchModal } = useAppStateSnapshot() const organization = useSelectedOrganization() @@ -367,7 +368,7 @@ export const CreateBranchModal = () => { > Cancel - + diff --git a/apps/studio/components/interfaces/BranchManagement/EditBranchModal.tsx b/apps/studio/components/interfaces/BranchManagement/EditBranchModal.tsx index 7161c91db3170..bed088dd52826 100644 --- a/apps/studio/components/interfaces/BranchManagement/EditBranchModal.tsx +++ b/apps/studio/components/interfaces/BranchManagement/EditBranchModal.tsx @@ -16,7 +16,7 @@ import { useCheckGithubBranchValidity } from 'data/integrations/github-branch-ch import { useGitHubConnectionsQuery } from 'data/integrations/github-connections-query' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { useSelectedProject } from 'hooks/misc/useSelectedProject' -import { useFlag } from 'hooks/ui/useFlag' +import { useIsBranching2Enabled } from 'components/interfaces/App/FeaturePreview/FeaturePreviewContext' import { BASE_PATH } from 'lib/constants' import { useRouter } from 'next/router' import { @@ -50,7 +50,7 @@ export const EditBranchModal = ({ branch, visible, onClose }: EditBranchModalPro const router = useRouter() const projectDetails = useSelectedProject() const selectedOrg = useSelectedOrganization() - const gitlessBranching = useFlag('gitlessBranching') + const gitlessBranching = useIsBranching2Enabled() const [isGitBranchValid, setIsGitBranchValid] = useState(false) diff --git a/apps/studio/components/interfaces/BranchManagement/Overview.tsx b/apps/studio/components/interfaces/BranchManagement/Overview.tsx index e631c4d394ed1..c4c2be7670cc0 100644 --- a/apps/studio/components/interfaces/BranchManagement/Overview.tsx +++ 
b/apps/studio/components/interfaces/BranchManagement/Overview.tsx @@ -23,7 +23,7 @@ import { useBranchUpdateMutation } from 'data/branches/branch-update-mutation' import type { Branch } from 'data/branches/branches-query' import { branchKeys } from 'data/branches/keys' import { useCheckPermissions } from 'hooks/misc/useCheckPermissions' -import { useFlag } from 'hooks/ui/useFlag' +import { useIsBranching2Enabled } from 'components/interfaces/App/FeaturePreview/FeaturePreviewContext' import { Button, DropdownMenu, @@ -163,7 +163,7 @@ const PreviewBranchActions = ({ onSelectDeleteBranch: () => void generateCreatePullRequestURL: (branchName?: string) => string }) => { - const gitlessBranching = useFlag('gitlessBranching') + const gitlessBranching = useIsBranching2Enabled() const queryClient = useQueryClient() const projectRef = branch.parent_project_ref ?? branch.project_ref diff --git a/apps/studio/components/interfaces/Integrations/Wrappers/EditWrapperSheet.tsx b/apps/studio/components/interfaces/Integrations/Wrappers/EditWrapperSheet.tsx index 87703801cc5a4..041c2c518b9f8 100644 --- a/apps/studio/components/interfaces/Integrations/Wrappers/EditWrapperSheet.tsx +++ b/apps/studio/components/interfaces/Integrations/Wrappers/EditWrapperSheet.tsx @@ -58,7 +58,9 @@ export const EditWrapperSheet = ({ }, }) - const [wrapperTables, setWrapperTables] = useState(formatWrapperTables(wrapper, wrapperMeta)) + const [wrapperTables, setWrapperTables] = useState(() => + formatWrapperTables(wrapper, wrapperMeta) + ) const [isEditingTable, setIsEditingTable] = useState(false) const [selectedTableToEdit, setSelectedTableToEdit] = useState( undefined @@ -93,7 +95,8 @@ export const EditWrapperSheet = ({ const { wrapper_name } = values if (wrapper_name.length === 0) errors.name = 'Please provide a name for your wrapper' - if (wrapperTables.length === 0) errors.tables = 'Please add at least one table' + if (!wrapperMeta.canTargetSchema && wrapperTables.length === 0) + errors.tables = 
'Please add at least one table' if (!isEmpty(errors)) return setFormErrors(errors) updateFDW({ diff --git a/apps/studio/components/interfaces/Integrations/Wrappers/Wrappers.utils.ts b/apps/studio/components/interfaces/Integrations/Wrappers/Wrappers.utils.ts index 92f2101a570b6..63786de027c37 100644 --- a/apps/studio/components/interfaces/Integrations/Wrappers/Wrappers.utils.ts +++ b/apps/studio/components/interfaces/Integrations/Wrappers/Wrappers.utils.ts @@ -109,7 +109,7 @@ export const formatWrapperTables = ( }) } -export const convertKVStringArrayToJson = (values: string[]) => { +export const convertKVStringArrayToJson = (values: string[]): Record => { return Object.fromEntries(values.map((value) => value.split('='))) } diff --git a/apps/studio/components/interfaces/Settings/Logs/LogsPreviewer.tsx b/apps/studio/components/interfaces/Settings/Logs/LogsPreviewer.tsx index 85a79c6533d11..f47f9c5086185 100644 --- a/apps/studio/components/interfaces/Settings/Logs/LogsPreviewer.tsx +++ b/apps/studio/components/interfaces/Settings/Logs/LogsPreviewer.tsx @@ -14,6 +14,7 @@ import { useSelectedLog } from 'hooks/analytics/useSelectedLog' import useSingleLog from 'hooks/analytics/useSingleLog' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { useUpgradePrompt } from 'hooks/misc/useUpgradePrompt' +import { useFlag } from 'hooks/ui/useFlag' import { useDatabaseSelectorStateSnapshot } from 'state/database-selector' import { Button } from 'ui' import { LogsBarChart } from 'ui-patterns/LogsBarChart' @@ -27,9 +28,8 @@ import { } from './Logs.constants' import type { Filters, LogSearchCallback, LogTemplate, QueryType } from './Logs.types' import { maybeShowUpgradePrompt } from './Logs.utils' -import UpgradePrompt from './UpgradePrompt' -import { useFlag } from 'hooks/ui/useFlag' import { PreviewFilterPanelWithUniversal } from './PreviewFilterPanelWithUniversal' +import UpgradePrompt from './UpgradePrompt' /** * Acts as a container component for 
the entire log display diff --git a/apps/studio/components/interfaces/Settings/Logs/PreviewFilterPanelWithUniversal.tsx b/apps/studio/components/interfaces/Settings/Logs/PreviewFilterPanelWithUniversal.tsx index ab7f5ae19c825..0a8c4502216f1 100644 --- a/apps/studio/components/interfaces/Settings/Logs/PreviewFilterPanelWithUniversal.tsx +++ b/apps/studio/components/interfaces/Settings/Logs/PreviewFilterPanelWithUniversal.tsx @@ -5,7 +5,6 @@ import { useRouter } from 'next/router' import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { useParams } from 'common' -// import CSVButton from 'components/ui/CSVButton' import DatabaseSelector from 'components/ui/DatabaseSelector' import { useLoadBalancersQuery } from 'data/read-replicas/load-balancers-query' import { IS_PLATFORM } from 'lib/constants' @@ -21,11 +20,6 @@ import { DatePickerValue } from './Logs.DatePickers' import { FILTER_OPTIONS, LOG_ROUTES_WITH_REPLICA_SUPPORT, LogsTableName } from './Logs.constants' import type { Filters, LogSearchCallback, LogTemplate } from './Logs.types' -interface CustomDateRangePickerProps { - value?: { from: Date; to?: Date } - onChange: (range: { from: Date; to?: Date } | undefined) => void -} - function CustomDateRangePicker({ onChange, onCancel }: CustomOptionProps) { const [dateRange, setDateRange] = useState() diff --git a/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx b/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx index 552e59f16e8c8..1d5f0fa82369c 100644 --- a/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx +++ b/apps/studio/components/interfaces/Storage/CreateBucketModal.tsx @@ -1,10 +1,15 @@ +import { zodResolver } from '@hookform/resolvers/zod' +import { snakeCase } from 'lodash' import { ChevronDown } from 'lucide-react' import Link from 'next/link' import { useRouter } from 'next/router' import { useEffect, useState } from 'react' +import { SubmitHandler, useForm } from 'react-hook-form' 
import { toast } from 'sonner' +import z from 'zod' import { useParams } from 'common' +import { useIcebergWrapperExtension } from 'components/to-be-cleaned/Storage/AnalyticBucketDetails/useIcebergWrapper' import { StorageSizeUnits } from 'components/to-be-cleaned/Storage/StorageSettings/StorageSettings.constants' import { convertFromBytes, @@ -12,94 +17,145 @@ import { } from 'components/to-be-cleaned/Storage/StorageSettings/StorageSettings.utils' import { useProjectStorageConfigQuery } from 'data/config/project-storage-config-query' import { useBucketCreateMutation } from 'data/storage/bucket-create-mutation' -import { IS_PLATFORM } from 'lib/constants' -import { Button, Collapsible, Form, Input, Listbox, Modal, Toggle, cn } from 'ui' -import { Admonition } from 'ui-patterns' +import { useIcebergWrapperCreateMutation } from 'data/storage/iceberg-wrapper-create-mutation' +import { BASE_PATH, IS_PLATFORM } from 'lib/constants' +import { + Alert_Shadcn_, + AlertDescription_Shadcn_, + AlertTitle_Shadcn_, + Button, + cn, + Collapsible, + Form_Shadcn_, + FormControl_Shadcn_, + FormField_Shadcn_, + Input_Shadcn_, + Label_Shadcn_, + Listbox, + Modal, + RadioGroupStacked, + RadioGroupStackedItem, + Toggle, + WarningIcon, +} from 'ui' +import { Admonition } from 'ui-patterns/admonition' +import { FormItemLayout } from 'ui-patterns/form/FormItemLayout/FormItemLayout' export interface CreateBucketModalProps { visible: boolean onClose: () => void } +const FormSchema = z.object({ + name: z + .string() + .trim() + .min(1, 'Please provide a name for your bucket') + .regex( + /^[a-z0-9.-]+$/, + 'The name of the bucket must only contain lowercase letters, numbers, dots, and hyphens' + ) + .refine((value) => !value.endsWith(' '), 'The name of the bucket cannot end with a whitespace') + .refine( + (value) => value !== 'public', + '"public" is a reserved name. 
Please choose another name' + ), + type: z.enum(['STANDARD', 'ANALYTICS']).default('STANDARD'), + public: z.boolean().default(false), + has_file_size_limit: z.boolean().default(false), + formatted_size_limit: z.coerce + .number() + .min(0, 'File size upload limit has to be at least 0') + .default(0), + allowed_mime_types: z.string().trim().default(''), +}) + +export type CreateBucketForm = z.infer + const CreateBucketModal = ({ visible, onClose }: CreateBucketModalProps) => { const { ref } = useParams() const router = useRouter() - const { mutate: createBucket, isLoading: isCreating } = useBucketCreateMutation({ - onSuccess: (res) => { - toast.success(`Successfully created bucket ${res.name}`) - router.push(`/project/${ref}/storage/buckets/${res.name}`) - onClose() - }, - }) + const { mutateAsync: createBucket, isLoading: isCreating } = useBucketCreateMutation() + const { mutateAsync: createIcebergWrapper, isLoading: isCreatingIcebergWrapper } = + useIcebergWrapperCreateMutation() - const { data } = useProjectStorageConfigQuery( - { projectRef: ref }, - { enabled: IS_PLATFORM && visible } - ) + const { data } = useProjectStorageConfigQuery({ projectRef: ref }, { enabled: IS_PLATFORM }) const { value, unit } = convertFromBytes(data?.fileSizeLimit ?? 
0) const formattedGlobalUploadLimit = `${value} ${unit}` const [selectedUnit, setSelectedUnit] = useState(StorageSizeUnits.BYTES) const [showConfiguration, setShowConfiguration] = useState(false) - const initialValues = { - name: '', - public: false, - file_size_limit: 0, - allowed_mime_types: '', - has_file_size_limit: false, - formatted_size_limit: 0, - } + const form = useForm({ + resolver: zodResolver(FormSchema), + defaultValues: { + name: '', + public: false, + type: 'STANDARD', + has_file_size_limit: false, + formatted_size_limit: 0, + allowed_mime_types: '', + }, + }) - const validate = (values: any) => { - const errors = {} as any + const bucketName = snakeCase(form.watch('name')) + const isPublicBucket = form.watch('public') + const isStandardBucket = form.watch('type') === 'STANDARD' + const hasFileSizeLimit = form.watch('has_file_size_limit') + const formattedSizeLimit = form.watch('formatted_size_limit') + const icebergWrapperExtensionState = useIcebergWrapperExtension() - if (!values.name) { - errors.name = 'Please provide a name for your bucket' - } + const onSubmit: SubmitHandler = async (values) => { + if (!ref) return console.error('Project ref is required') - if (values.name && !/^[a-z0-9.-]+$/.test(values.name)) { - errors.name = - 'The name of the bucket must only container lowercase letters, numbers, dots, and hyphens' + if (values.type === 'ANALYTICS' && !icebergCatalogEnabled) { + toast.error( + 'The Analytics catalog feature is not enabled for your project. Please contact support to enable it.' + ) + return } - if (values.name && values.name.endsWith(' ')) { - errors.name = 'The name of the bucket cannot end with a whitespace' - } + try { + const fileSizeLimit = values.has_file_size_limit + ? 
convertToBytes(values.formatted_size_limit, selectedUnit) + : undefined - if (values.has_file_size_limit && values.formatted_size_limit < 0) { - errors.formatted_size_limit = 'File size upload limit has to be at least 0' - } - if (values.name === 'public') { - errors.name = '"public" is a reserved name. Please choose another name' - } - return errors - } + const allowedMimeTypes = + values.allowed_mime_types.length > 0 + ? values.allowed_mime_types.split(',').map((x) => x.trim()) + : undefined - const onSubmit = async (values: any) => { - if (!ref) return console.error('Project ref is required') + await createBucket({ + projectRef: ref, + id: values.name, + type: values.type, + isPublic: values.public, + file_size_limit: fileSizeLimit, + allowed_mime_types: allowedMimeTypes, + }) - createBucket({ - projectRef: ref, - id: values.name, - isPublic: values.public, - file_size_limit: values.has_file_size_limit - ? convertToBytes(values.formatted_size_limit, selectedUnit) - : null, - allowed_mime_types: - values.allowed_mime_types.length > 0 - ? values.allowed_mime_types.split(',').map((x: string) => x.trim()) - : null, - }) + if (values.type === 'ANALYTICS' && icebergWrapperExtensionState === 'installed') { + await createIcebergWrapper({ bucketName: values.name }) + } + toast.success(`Successfully created bucket ${values.name}`) + router.push(`/project/${ref}/storage/buckets/${values.name}`) + onClose() + } catch (error) { + console.error(error) + toast.error('Failed to create bucket') + } } useEffect(() => { if (visible) { + form.reset() setSelectedUnit(StorageSizeUnits.BYTES) setShowConfiguration(false) } - }, [visible]) + }, [visible, form]) + + const icebergCatalogEnabled = data?.features?.icebergCatalog?.enabled return ( { header="Create storage bucket" onCancel={() => onClose()} > -
- {({ values }: { values: any }) => { - const isPublicBucket = values.public - - return ( - <> - - + + + ( + -
- + + + + + )} + /> + +
+ ( + + + field.onChange(v)} + value={field.value} + > + +
+
+

+ Compatible with S3 buckets. +

+
+
+
+ {IS_PLATFORM && ( + +
+
+

+ Stores Iceberg files and is optimized for analytical workloads. +

+
+
+ {icebergCatalogEnabled ? null : ( +
+ + + This feature is currently in alpha and not yet enabled for your + project. Sign up{' '} + + here + + +
+ )} +
+ )} +
+
+
+ )} + /> +
+ + + {isStandardBucket ? ( + <> + +
+ ( + + + + + + )} /> {isPublicBucket && ( { )}
+ setShowConfiguration(!showConfiguration)} > -
-

Additional configuration

+
+

Additional restrictions

{
- ( + + + + + + )} /> - {values.has_file_size_limit && ( + {hasFileSizeLimit && (
- { - if (event.charCode < 48 || event.charCode > 57) { - event.preventDefault() - } - }} - descriptionText={`Equivalent to ${convertToBytes( - values.formatted_size_limit, - selectedUnit - ).toLocaleString()} bytes.`} + render={({ field }) => ( + + + { + if (event.charCode < 48 || event.charCode > 57) { + event.preventDefault() + } + }} + /> + + + Equivalent to{' '} + {convertToBytes( + formattedSizeLimit, + selectedUnit + ).toLocaleString()}{' '} + bytes. + + + )} />
@@ -232,36 +390,122 @@ const CreateBucketModal = ({ visible, onClose }: CreateBucketModalProps) => {
)}
- ( + + + + + + )} />
- - - - - - ) - }} - + ) : ( + + {icebergWrapperExtensionState === 'installed' ? ( + +

+ Supabase will setup a + + foreign data wrapper + {bucketName && {`${bucketName}_fdw`}} + + + {' '} + for easier access to the data. This action will also create{' '} + + S3 Access Keys + {bucketName && ( + <> + {' '} + named {`${bucketName}_keys`} + + )} + + and + + four Vault Secrets + {bucketName && ( + <> + {' '} + prefixed with{' '} + {`${bucketName}_vault_`} + + )} + + . + +

+

+ As a final step, you'll need to create an{' '} + Iceberg namespace before you + connect the Iceberg data to your database. +

+
+ ) : ( + + + + You need to install the Iceberg wrapper extension to connect your Analytic + bucket to your database. + + +

+ You need to install the wrappers extension + (with the minimum version of 0.5.3) if you want to connect your + Analytics bucket to your database. +

+
+
+ )} +
+ )} + + + + + + + ) } diff --git a/apps/studio/components/interfaces/Storage/EditBucketModal.tsx b/apps/studio/components/interfaces/Storage/EditBucketModal.tsx index aff15a75ef7ed..5abb63cd4d582 100644 --- a/apps/studio/components/interfaces/Storage/EditBucketModal.tsx +++ b/apps/studio/components/interfaces/Storage/EditBucketModal.tsx @@ -15,11 +15,11 @@ import { useProjectStorageConfigQuery } from 'data/config/project-storage-config import { useBucketUpdateMutation } from 'data/storage/bucket-update-mutation' import { IS_PLATFORM } from 'lib/constants' import { Admonition } from 'ui-patterns' -import type { StorageBucket } from './Storage.types' +import { Bucket } from 'data/storage/buckets-query' export interface EditBucketModalProps { visible: boolean - bucket?: StorageBucket + bucket?: Bucket onClose: () => void } diff --git a/apps/studio/components/interfaces/Storage/ImportForeignSchemaDialog.tsx b/apps/studio/components/interfaces/Storage/ImportForeignSchemaDialog.tsx new file mode 100644 index 0000000000000..a173dbcfa52de --- /dev/null +++ b/apps/studio/components/interfaces/Storage/ImportForeignSchemaDialog.tsx @@ -0,0 +1,167 @@ +import { zodResolver } from '@hookform/resolvers/zod' +import { snakeCase } from 'lodash' +import { useEffect, useState } from 'react' +import { SubmitHandler, useForm } from 'react-hook-form' +import { toast } from 'sonner' +import z from 'zod' + +import { useParams } from 'common' +import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' +import SchemaSelector from 'components/ui/SchemaSelector' +import { useFDWImportForeignSchemaMutation } from 'data/fdw/fdw-import-foreign-schema-mutation' +import { + Button, + Form_Shadcn_, + FormControl_Shadcn_, + FormField_Shadcn_, + Input_Shadcn_, + Modal, +} from 'ui' +import { FormItemLayout } from 'ui-patterns/form/FormItemLayout/FormItemLayout' +import SchemaEditor from '../TableGridEditor/SidePanelEditor/SchemaEditor' + +export interface 
ImportForeignSchemaDialogProps { + bucketName: string + namespace: string + excludedSchemas: string[] + wrapperValues: Record + visible: boolean + onClose: () => void +} + +const FormSchema = z.object({ + bucketName: z.string().trim(), + sourceNamespace: z.string().trim(), + targetSchema: z.string().trim(), +}) + +export type ImportForeignSchemaForm = z.infer + +export const ImportForeignSchemaDialog = ({ + bucketName, + namespace, + excludedSchemas, + wrapperValues, + visible, + onClose, +}: ImportForeignSchemaDialogProps) => { + const { project } = useProjectContext() + const { ref } = useParams() + const [loading, setLoading] = useState(false) + const [createSchemaSheetOpen, setCreateSchemaSheetOpen] = useState(false) + + const { mutateAsync: importForeignSchema } = useFDWImportForeignSchemaMutation({ + onSuccess: () => { + toast.success(`Successfully connected ${bucketName} to the database.`) + onClose() + }, + }) + + const form = useForm({ + resolver: zodResolver(FormSchema), + defaultValues: { + bucketName, + sourceNamespace: namespace, + targetSchema: '', + }, + }) + + const onSubmit: SubmitHandler = async (values) => { + if (!ref) return console.error('Project ref is required') + setLoading(true) + + try { + await importForeignSchema({ + projectRef: ref, + connectionString: project?.connectionString, + serverName: `${snakeCase(values.bucketName)}_fdw_server`, + sourceSchema: values.sourceNamespace, + targetSchema: values.targetSchema, + }) + } catch (error: any) { + // error will be handled by the mutation onError callback + } finally { + setLoading(false) + } + } + + useEffect(() => { + if (visible) { + form.reset({ + bucketName, + sourceNamespace: namespace, + targetSchema: '', + }) + } + }, [visible, form, bucketName, namespace]) + + return ( + + Connect namespace {namespace} + + } + onCancel={() => onClose()} + > + +
+ + ( + + + + + + )} + /> + + ( + + field.onChange(schema)} + onSelectCreateSchema={() => setCreateSchemaSheetOpen(true)} + /> + + )} + /> + + + + + + + +
+ setCreateSchemaSheetOpen(false)} + onSuccess={(schema) => { + form.setValue('targetSchema', schema) + setCreateSchemaSheetOpen(false) + }} + /> +
+ ) +} diff --git a/apps/studio/components/interfaces/Storage/Storage.types.ts b/apps/studio/components/interfaces/Storage/Storage.types.ts index 37b00902f284c..8de8a32625ee4 100644 --- a/apps/studio/components/interfaces/Storage/Storage.types.ts +++ b/apps/studio/components/interfaces/Storage/Storage.types.ts @@ -4,17 +4,6 @@ export interface StoragePolicyFormField extends PolicyFormField { allowedOperations: string[] } -export interface StorageBucket { - id: string - name: string - owner: string - public: boolean - file_size_limit: number | null - allowed_mime_types: string[] | null - created_at: string - updated_at: string -} - export interface BucketUpdatePayload { public?: boolean file_size_limit?: number | null diff --git a/apps/studio/components/interfaces/UnifiedLogs/UnifiedLogs.tsx b/apps/studio/components/interfaces/UnifiedLogs/UnifiedLogs.tsx index 0cb64d8ab947b..36490a091ff59 100644 --- a/apps/studio/components/interfaces/UnifiedLogs/UnifiedLogs.tsx +++ b/apps/studio/components/interfaces/UnifiedLogs/UnifiedLogs.tsx @@ -297,7 +297,7 @@ export const UnifiedLogs = () => { > - +

{bucket.public && Public} + {bucket.type === 'ANALYTICS' && ( + + + + + +

Analytics bucket

+ + + )}
{canUpdateBuckets && isSelected ? ( -
+ ) : ( +
+
+ ) + ) : null + } + /> + ) +} diff --git a/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/NamespaceRow.tsx b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/NamespaceRow.tsx new file mode 100644 index 0000000000000..80d75a33da411 --- /dev/null +++ b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/NamespaceRow.tsx @@ -0,0 +1,132 @@ +import { RefreshCw, SquareArrowOutUpRight } from 'lucide-react' +import { useMemo, useState } from 'react' + +import { FormattedWrapperTable } from 'components/interfaces/Integrations/Wrappers/Wrappers.utils' +import { ImportForeignSchemaDialog } from 'components/interfaces/Storage/ImportForeignSchemaDialog' +import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' +import { ButtonTooltip } from 'components/ui/ButtonTooltip' +import { useFDWImportForeignSchemaMutation } from 'data/fdw/fdw-import-foreign-schema-mutation' +import { FDW } from 'data/fdw/fdws-query' +import { useIcebergNamespaceTablesQuery } from 'data/storage/iceberg-namespace-tables-query' +import { BASE_PATH } from 'lib/constants' +import { Badge, Button, TableCell, TableRow } from 'ui' + +type NamespaceRowProps = { + bucketName: string + namespace: string + schema: string + excludedSchemas: string[] + tables: (FormattedWrapperTable & { id: number })[] + token: string + wrapperInstance: FDW + wrapperValues: Record +} + +export const NamespaceRow = ({ + bucketName, + namespace, + schema, + excludedSchemas, + tables, + token, + wrapperInstance, + wrapperValues, +}: NamespaceRowProps) => { + const { project } = useProjectContext() + const [importForeignSchemaShown, setImportForeignSchemaShown] = useState(false) + + const targetSchema = tables[0]?.schema_name ?? 
'' + + const { data: tablesData, isLoading: isLoadingNamespaceTables } = useIcebergNamespaceTablesQuery( + { + catalogUri: wrapperValues.catalog_uri, + warehouse: wrapperValues.warehouse, + token: token, + namespace: namespace, + }, + { enabled: !!token } + ) + + const { mutateAsync: importForeignSchema, isLoading: isImportingForeignSchema } = + useFDWImportForeignSchemaMutation() + + const rescanNamespace = async () => { + await importForeignSchema({ + projectRef: project?.ref, + connectionString: project?.connectionString, + serverName: wrapperInstance.server_name, + sourceSchema: namespace, + targetSchema: targetSchema, + }) + } + + const missingTables = useMemo(() => { + return (tablesData || []).filter( + (t) => !tables.find((table) => table.table.split('.')[1] === t) + ) + }, [tablesData, tables]) + + let scanTooltip = useMemo(() => { + if (isImportingForeignSchema) return 'Scanning for new tables...' + if (isLoadingNamespaceTables) return 'Loading tables...' + if (missingTables.length > 0) return `Found ${missingTables.length} new tables` + if (tables.length === 0) return 'No tables found' + return 'All tables are up to date' + }, [isImportingForeignSchema, isLoadingNamespaceTables, missingTables.length, tables.length]) + + return ( + + + {namespace} + + {schema && {schema}} + + {tablesData ? `${tables.length}/${tablesData.length} connected tables` : ``} + + +
+ 0 ? 'primary' : 'default'} + icon={} + loading={isImportingForeignSchema || isLoadingNamespaceTables} + onClick={() => (schema ? rescanNamespace() : setImportForeignSchemaShown(true))} + disabled={missingTables.length === 0} + tooltip={{ content: { text: scanTooltip } }} + > + Sync + + {schema ? ( + + + + ) : ( + } + disabled + tooltip={{ + content: { text: 'There are no tables connected.' }, + }} + > + Table Editor + + )} +
+
+ setImportForeignSchemaShown(false)} + /> +
+ ) +} diff --git a/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/SimpleConfigurationDetails.tsx b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/SimpleConfigurationDetails.tsx new file mode 100644 index 0000000000000..feccb8aa02fa4 --- /dev/null +++ b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/SimpleConfigurationDetails.tsx @@ -0,0 +1,69 @@ +import Link from '@ui/components/Typography/Link' +import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' +import { ScaffoldSectionDescription, ScaffoldSectionTitle } from 'components/layouts/Scaffold' +import { useProjectSettingsV2Query } from 'data/config/project-settings-v2-query' +import { Card } from 'ui' +import { getCatalogURI, getConnectionURL } from '../StorageSettings/StorageSettings.utils' +import { DESCRIPTIONS } from './constants' +import { CopyEnvButton } from './CopyEnvButton' +import { DecryptedReadOnlyInput } from './DecryptedReadOnlyInput' + +const wrapperMeta = { + options: [ + { name: 'vault_token', label: 'Vault Token', secureEntry: false }, + { name: 'warehouse', label: 'Warehouse', secureEntry: false }, + { name: 's3.endpoint', label: 'S3 Endpoint', secureEntry: false }, + { name: 'catalog_uri', label: 'Catalog URI', secureEntry: false }, + ], +} + +export const SimpleConfigurationDetails = ({ bucketName }: { bucketName: string }) => { + const { project } = useProjectContext() + const { data: settings } = useProjectSettingsV2Query({ projectRef: project?.ref }) + const protocol = settings?.app_config?.protocol ?? 'https' + const endpoint = settings?.app_config?.endpoint + const serviceApiKey = + (settings?.service_api_keys ?? []).find((key) => key.tags === 'service_role')?.api_key ?? + 'SUPABASE_CLIENT_SERVICE_KEY' + + const values: Record = { + vault_token: serviceApiKey, + warehouse: bucketName, + 's3.endpoint': getConnectionURL(project?.ref ?? 
'', protocol, endpoint), + catalog_uri: getCatalogURI(project?.ref ?? '', protocol, endpoint), + } + + return ( +
+
+
+ Configuration Details + + You can use the following configuration details to connect to the bucket from your code. + +
+ +
+ +

+ To get AWS credentials, you can create them using the{' '} + + S3 Access Keys + {' '} + feature. +

+ {wrapperMeta.options.map((option) => { + return ( + + ) + })} +
+
+ ) +} diff --git a/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/constants.ts b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/constants.ts new file mode 100644 index 0000000000000..c9851d3bef345 --- /dev/null +++ b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/constants.ts @@ -0,0 +1,26 @@ +export const OPTION_ORDER = [ + 'catalog_uri', + 'vault_token', + 'warehouse', + 's3.endpoint', + 'vault_aws_access_key_id', + 'vault_aws_secret_access_key', +] + +export const LABELS: Record = { + vault_aws_access_key_id: 'S3 Access Key ID', + vault_aws_secret_access_key: 'S3 Secret Access Key', + vault_token: 'Catalog Token', + warehouse: 'Warehouse Name', + 's3.endpoint': 'S3 Endpoint', + catalog_uri: 'Catalog URI', +} + +export const DESCRIPTIONS: Record = { + vault_aws_access_key_id: 'Matches the AWS access key ID from a S3 Access Key.', + vault_aws_secret_access_key: 'Matches the AWS secret access from a S3 Access Key.', + vault_token: 'Corresponds to the service role key.', + warehouse: 'Matches the name of the bucket.', + 's3.endpoint': '', + catalog_uri: '', +} diff --git a/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/index.tsx b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/index.tsx new file mode 100644 index 0000000000000..49aead6e1f072 --- /dev/null +++ b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/index.tsx @@ -0,0 +1,403 @@ +import { snakeCase, uniq } from 'lodash' +import Link from 'next/link' +import { useMemo } from 'react' + +import { INTEGRATIONS } from 'components/interfaces/Integrations/Landing/Integrations.constants' +import { WRAPPER_HANDLERS } from 'components/interfaces/Integrations/Wrappers/Wrappers.constants' +import { WrapperMeta } from 'components/interfaces/Integrations/Wrappers/Wrappers.types' +import { + convertKVStringArrayToJson, + formatWrapperTables, + wrapperMetaComparator, +} from 
'components/interfaces/Integrations/Wrappers/Wrappers.utils' +import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' +import { + ScaffoldContainer, + ScaffoldHeader, + ScaffoldSectionDescription, + ScaffoldSectionTitle, + ScaffoldTitle, +} from 'components/layouts/Scaffold' +import { DocsButton } from 'components/ui/DocsButton' +import { + DatabaseExtension, + useDatabaseExtensionsQuery, +} from 'data/database-extensions/database-extensions-query' +import { useFDWsQuery } from 'data/fdw/fdws-query' +import { Bucket } from 'data/storage/buckets-query' +import { useIcebergNamespacesQuery } from 'data/storage/iceberg-namespaces-query' +import { useIcebergWrapperCreateMutation } from 'data/storage/iceberg-wrapper-create-mutation' +import { useVaultSecretDecryptedValueQuery } from 'data/vault/vault-secret-decrypted-value-query' +import { + Alert_Shadcn_, + AlertDescription_Shadcn_, + AlertTitle_Shadcn_, + Button, + Card, + Table, + TableBody, + TableHead, + TableHeader, + TableRow, + WarningIcon, +} from 'ui' +import { GenericSkeletonLoader } from 'ui-patterns/ShimmeringLoader' +import { DESCRIPTIONS, LABELS, OPTION_ORDER } from './constants' +import { CopyEnvButton } from './CopyEnvButton' +import { DecryptedReadOnlyInput } from './DecryptedReadOnlyInput' +import { NamespaceRow } from './NamespaceRow' +import { SimpleConfigurationDetails } from './SimpleConfigurationDetails' +import { useIcebergWrapperExtension } from './useIcebergWrapper' + +export const AnalyticBucketDetails = ({ bucket }: { bucket: Bucket }) => { + const { project } = useProjectContext() + + const { data: extensionsData } = useDatabaseExtensionsQuery({ + projectRef: project?.ref, + connectionString: project?.connectionString, + }) + + const { data, isLoading: isFDWsLoading } = useFDWsQuery({ + projectRef: project?.ref, + connectionString: project?.connectionString, + }) + + /** The wrapper instance is the wrapper that is installed for this Analytics bucket. 
*/ + const wrapperInstance = useMemo(() => { + return data + ?.filter((wrapper) => + wrapperMetaComparator( + { + handlerName: WRAPPER_HANDLERS.ICEBERG, + server: { + options: [], + }, + }, + wrapper + ) + ) + .find((w) => w.name === snakeCase(`${bucket.name}_fdw`)) + }, [data, bucket.name]) + + const extensionState = useIcebergWrapperExtension() + + const integration = INTEGRATIONS.find((i) => i.id === 'iceberg_wrapper' && i.type === 'wrapper') + + const wrapperValues = convertKVStringArrayToJson(wrapperInstance?.server_options ?? []) + const wrapperMeta = (integration?.type === 'wrapper' && integration.meta) as WrapperMeta + + const { data: token, isSuccess: isSuccessToken } = useVaultSecretDecryptedValueQuery( + { + projectRef: project?.ref, + connectionString: project?.connectionString, + id: wrapperValues.vault_token, + }, + { + enabled: wrapperValues.vault_token !== undefined, + } + ) + + const { data: namespacesData, isLoading: isLoadingNamespaces } = useIcebergNamespacesQuery( + { + catalogUri: wrapperValues.catalog_uri, + warehouse: wrapperValues.warehouse, + token: token!, + }, + { enabled: isSuccessToken } + ) + + const wrapperTables = useMemo(() => { + if (!wrapperInstance) return [] + + return formatWrapperTables(wrapperInstance, wrapperMeta!) + }, [wrapperInstance, wrapperMeta]) + + const namespaces = useMemo(() => { + const fdwNamespaces = wrapperTables.map((t) => t.table.split('.')[0]) as string[] + const namespaces = uniq([...fdwNamespaces, ...(namespacesData ?? [])]) + + return namespaces.map((namespace) => { + const tables = wrapperTables.filter((t) => t.table.split('.')[0] === namespace) + const schema = tables[0]?.schema + + return { + namespace: namespace, + schema: schema, + tables: tables, + } + }) + }, [wrapperTables, namespacesData]) + const excludedSchemas = uniq(namespaces.map((n) => n.schema)) + + const wrappersExtension = extensionsData?.find((ext) => ext.name === 'wrappers') + + const state = isFDWsLoading + ? 
'loading' + : extensionState === 'installed' + ? wrapperInstance + ? 'added' + : 'missing' + : extensionState + + return ( +
+ + + + Analytics Bucket {bucket.name} + + + Namespaces and tables connected to this bucket. + + + + + + {state === 'loading' && } + {state === 'not-installed' && ( + + )} + {state === 'needs-upgrade' && ( + + )} + {state === 'added' && wrapperInstance && ( + <> +
+ {isLoadingNamespaces || isFDWsLoading ? ( + + ) : namespaces.length === 0 ? ( + +

No namespaces in this bucket

+

+ Create a namespace and add some data{' '} + + {' '} + to get started + +

+
+ ) : ( + + + + + Namespace + Schema + Tables + Actions + + + + {namespaces.map(({ namespace, schema, tables }) => ( + + ))} + +
+
+ )} +
+ +
+
+
+ Connection Details + + You can use the following parameters to connect to the bucket from an Iceberg + client. + +
+
+ !option.hidden && wrapperValues[option.name] + )} + values={wrapperValues} + /> + +
+
+ + {wrapperMeta.server.options + .filter((option) => !option.hidden && wrapperValues[option.name]) + .sort((a, b) => OPTION_ORDER.indexOf(a.name) - OPTION_ORDER.indexOf(b.name)) + .map((option) => { + return ( + + ) + })} + +
+ + )} + {state === 'missing' && } +
+
+ ) +} + +const ExtensionNotInstalled = ({ + bucketName, + projectRef, + wrapperMeta, + wrappersExtension, +}: { + bucketName: string + projectRef: string + wrapperMeta: WrapperMeta + wrappersExtension: DatabaseExtension +}) => { + const databaseNeedsUpgrading = + (wrappersExtension?.default_version ?? '') < (wrapperMeta?.minimumExtensionVersion ?? '') + + return ( + <> + + + + You need to install the wrappers extension to connect this Analytics bucket to the + database. + + +

+ The {wrapperMeta.label} wrapper requires the Wrappers extension to be installed. You can + install version {wrappersExtension?.installed_version} + {databaseNeedsUpgrading && + ' which is below the minimum version that supports Iceberg wrapper'} + . Please {databaseNeedsUpgrading && 'upgrade your database then '}install the{' '} + wrappers extension to create this wrapper. +

+
+ + + +
+ + + ) +} + +const ExtensionNeedsUpgrade = ({ + bucketName, + projectRef, + wrapperMeta, + wrappersExtension, +}: { + bucketName: string + projectRef: string + wrapperMeta: WrapperMeta + wrappersExtension: DatabaseExtension +}) => { + // [Joshen] Default version is what's on the DB, so if the installed version is already the default version + // but still doesnt meet the minimum extension version, then DB upgrade is required + const databaseNeedsUpgrading = + wrappersExtension?.installed_version === wrappersExtension?.default_version + + return ( + <> + + + + Your extension version is outdated for this wrapper. + + +

+ The {wrapperMeta.label} wrapper requires a minimum extension version of{' '} + {wrapperMeta.minimumExtensionVersion}. You have version{' '} + {wrappersExtension?.installed_version} installed. Please{' '} + {databaseNeedsUpgrading && 'upgrade your database then '}update the extension by + disabling and enabling the wrappers extension to create + this wrapper. +

+

+ Warning: Before reinstalling the wrapper extension, you must first remove all existing + wrappers. Afterward, you can recreate the wrappers. +

+
+ + + +
+ + + ) +} + +const WrapperMissing = ({ bucketName }: { bucketName: string }) => { + const { mutateAsync: createIcebergWrapper, isLoading: isCreatingIcebergWrapper } = + useIcebergWrapperCreateMutation() + + const onSetupWrapper = async () => { + await createIcebergWrapper({ bucketName }) + } + + return ( + <> + + + + This Analytics bucket does not have a foreign data wrapper setup. + + +

You need to setup a wrapper to connect this bucket to the database.

+
+ + + +
+ + + ) +} diff --git a/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/useIcebergWrapper.tsx b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/useIcebergWrapper.tsx new file mode 100644 index 0000000000000..56387d46095fe --- /dev/null +++ b/apps/studio/components/to-be-cleaned/Storage/AnalyticBucketDetails/useIcebergWrapper.tsx @@ -0,0 +1,35 @@ +import { INTEGRATIONS } from 'components/interfaces/Integrations/Landing/Integrations.constants' +import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectContext' +import { useDatabaseExtensionsQuery } from 'data/database-extensions/database-extensions-query' + +export const useIcebergWrapperExtension = () => { + const { project } = useProjectContext() + const { data: extensionsData, isLoading: isExtensionsLoading } = useDatabaseExtensionsQuery({ + projectRef: project?.ref, + connectionString: project?.connectionString, + }) + + const integration = INTEGRATIONS.find((i) => i.id === 'iceberg_wrapper') + + if (!integration || integration.type !== 'wrapper') { + // This should never happen + return 'not-found' + } + + const wrapperMeta = integration.meta + + const wrappersExtension = extensionsData?.find((ext) => ext.name === 'wrappers') + const isWrappersExtensionInstalled = !!wrappersExtension?.installed_version + const hasRequiredVersion = + (wrappersExtension?.installed_version ?? '') >= (wrapperMeta?.minimumExtensionVersion ?? '') + + const state = isExtensionsLoading + ? 'loading' + : isWrappersExtensionInstalled + ? hasRequiredVersion + ? 
'installed' + : 'needs-upgrade' + : ('not-installed' as const) + + return state +} diff --git a/apps/studio/components/to-be-cleaned/Storage/DeleteBucketModal.tsx b/apps/studio/components/to-be-cleaned/Storage/DeleteBucketModal.tsx index a02f950e8e2a4..ac7293b105c2a 100644 --- a/apps/studio/components/to-be-cleaned/Storage/DeleteBucketModal.tsx +++ b/apps/studio/components/to-be-cleaned/Storage/DeleteBucketModal.tsx @@ -73,7 +73,7 @@ const DeleteBucketModal = ({ visible = false, bucket, onClose }: DeleteBucketMod const onDeleteBucket = async () => { if (!projectRef) return console.error('Project ref is required') if (!bucket) return console.error('No bucket is selected') - deleteBucket({ projectRef, id: bucket.id }) + deleteBucket({ projectRef, id: bucket.id, type: bucket.type }) } return ( diff --git a/apps/studio/components/to-be-cleaned/Storage/StorageExplorer/StorageExplorer.tsx b/apps/studio/components/to-be-cleaned/Storage/StorageExplorer/StorageExplorer.tsx index a268b1aaefff5..cfc8ecd5c40ad 100644 --- a/apps/studio/components/to-be-cleaned/Storage/StorageExplorer/StorageExplorer.tsx +++ b/apps/studio/components/to-be-cleaned/Storage/StorageExplorer/StorageExplorer.tsx @@ -167,7 +167,7 @@ const StorageExplorer = ({ bucket }: StorageExplorerProps) => { return (
{selectedItems.length === 0 ? ( = { diff --git a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterCommand.tsx b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterCommand.tsx index 066f881d895a8..355d482ee45ad 100644 --- a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterCommand.tsx +++ b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterCommand.tsx @@ -62,6 +62,10 @@ export function DataTableFilterCommand({ const trimmedInputValue = inputValue.trim() + const queryFields = filterFields.filter( + (x) => typeof x.value === 'string' && currentWord.includes(`${x.value}:`) + ) + // [Joshen] Temporarily disabling as this conflicts with our current CMD K behaviour // useHotKey(() => setOpen((open) => !open), 'k') @@ -80,18 +84,6 @@ export function DataTableFilterCommand({ const field = _filterFields?.find((field) => field.value === filter.id) return !field?.commandDisabled }) - const currentDisabledFilters = currentFilters.filter((filter) => { - const field = _filterFields?.find((field) => field.value === filter.id) - return field?.commandDisabled - }) - - const commandDisabledFilterKeys = currentDisabledFilters.reduce( - (prev, curr) => { - prev[curr.id] = curr.value - return prev - }, - {} as Record - ) for (const key of Object.keys(searchParams)) { const value = searchParams[key as keyof typeof searchParams] @@ -198,6 +190,7 @@ export function DataTableFilterCommand({
{/* default height is 300px but in case of more, we'd like to tease the user */} + No results found. {filterFields.map((field) => { if (typeof field.value !== 'string') return null @@ -227,106 +220,113 @@ export function DataTableFilterCommand({ }} className="group" > - {field.value} + {field.label} ) })} + - - {filterFields?.map((field) => { - if (typeof field.value !== 'string') return null - if (!currentWord.includes(`${field.value}:`)) return null - const column = table.getColumn(field.value) - const facetedValue = - getFacetedUniqueValues?.(table, field.value) || column?.getFacetedUniqueValues() + {queryFields.length > 0 && ( + <> + + {queryFields.map((field) => { + const column = table.getColumn(field.value) + const facetedValue = + getFacetedUniqueValues?.(table, field.value) || + column?.getFacetedUniqueValues() - const options = getFieldOptions({ field }) + const options = getFieldOptions({ field }) - return options.map((optionValue) => { - return ( - { - e.preventDefault() - e.stopPropagation() - }} - onSelect={(value) => { - setInputValue((prev) => - replaceInputByFieldType({ - prev, - currentWord, - optionValue, - value, - field, - }) - ) - setCurrentWord('') - }} - > - {`${optionValue}`} - {facetedValue?.has(optionValue) ? ( - - {formatCompactNumber(facetedValue.get(optionValue) || 0)} - - ) : null} - - ) - }) - })} - - - - {lastSearches - ?.sort((a, b) => b.timestamp - a.timestamp) - .slice(0, 5) - .map((item) => { - return ( - { - e.preventDefault() - e.stopPropagation() - }} - onSelect={(value) => { - const search = value.replace('suggestion:', '') - setInputValue(`${search} `) - setCurrentWord('') - }} - className="group" - > - {item.search} - - {formatDistanceToNow(item.timestamp, { - addSuffix: true, - })} - - - - ) - })} - - No results found. + {item.search} + + {formatDistanceToNow(item.timestamp, { + addSuffix: true, + })} + + + + ) + })} + + )} +
@@ -370,10 +370,12 @@ export function DataTableFilterCommand({ function CommandItemSuggestions({ field }: { field: DataTableFilterField }) { const { table, getFacetedMinMaxValues, getFacetedUniqueValues } = useDataTable() const value = field.value as string + const className = 'ml-2 hidden truncate text-foreground-lighter group-aria-[selected=true]:block' + switch (field.type) { case 'checkbox': { return ( - + {getFacetedUniqueValues ? Array.from(getFacetedUniqueValues(table, value)?.keys() || []) .map((value) => `[${value}]`) @@ -385,17 +387,13 @@ function CommandItemSuggestions({ field }: { field: DataTableFilterField< case 'slider': { const [min, max] = getFacetedMinMaxValues?.(table, value) || [field.min, field.max] return ( - + [{min} - {max}] ) } case 'input': { - return ( - - [{`${String(field.value)}`} input] - - ) + return [{`${String(field.value)}`} input] } default: { return null diff --git a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterControls.tsx b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterControls.tsx index 97c16ecd89a0e..628ba8a49f94b 100644 --- a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterControls.tsx +++ b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterControls.tsx @@ -11,6 +11,7 @@ import { DataTableFilterResetButton } from './DataTableFilterResetButton' import { DataTableFilterSlider } from './DataTableFilterSlider' import { DataTableFilterTimerange } from './DataTableFilterTimerange' +import { DateRangeDisabled } from '../DataTable.types' import { useDataTable } from '../providers/DataTableProvider' // FIXME: use @container (especially for the slider element) to restructure elements @@ -18,7 +19,11 @@ import { useDataTable } from '../providers/DataTableProvider' // TODO: only pass the columns to generate the filters! 
// https://tanstack.com/table/v8/docs/framework/react/examples/filters -export function DataTableFilterControls() { +interface DataTableFilterControls { + dateRangeDisabled?: DateRangeDisabled +} + +export function DataTableFilterControls({ dateRangeDisabled }: DataTableFilterControls) { const { filterFields } = useDataTable() return ( } case 'timerange': { - return + return ( + + ) } } })()} diff --git a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterTimerange.tsx b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterTimerange.tsx index 9981ed31e61dc..174b08a28c16f 100644 --- a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterTimerange.tsx +++ b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilterTimerange.tsx @@ -9,6 +9,7 @@ import { useDataTable } from '../providers/DataTableProvider' export function DataTableFilterTimerange({ value: _value, presets, + dateRangeDisabled, }: DataTableTimerangeFilterField) { const value = _value as string const { table, columnFilters } = useDataTable() @@ -35,5 +36,7 @@ export function DataTableFilterTimerange({ } } - return + return ( + + ) } diff --git a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilters.utils.ts b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilters.utils.ts index c65b6106b07e6..45eacf1942724 100644 --- a/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilters.utils.ts +++ b/apps/studio/components/ui/DataTable/DataTableFilters/DataTableFilters.utils.ts @@ -218,13 +218,10 @@ export function columnFiltersParser({ const regex = /(\w+):([^]*?)(?=\s+\w+:|$)/g let match - console.log('DataTableFilterCommand parsing input:', inputValue) - while ((match = regex.exec(inputValue)) !== null) { const [_, fieldName, fieldValue] = match if (fieldName && fieldValue) { filterPairs[fieldName] = fieldValue.trim() - console.log(`Parsed filter pair: ${fieldName} = ${fieldValue.trim()}`) } } diff --git 
a/apps/studio/components/ui/DataTable/DatePickerWithRange.tsx b/apps/studio/components/ui/DataTable/DatePickerWithRange.tsx index 4b03523964219..f8d7f75f13f18 100644 --- a/apps/studio/components/ui/DataTable/DatePickerWithRange.tsx +++ b/apps/studio/components/ui/DataTable/DatePickerWithRange.tsx @@ -22,7 +22,7 @@ import { SelectValue_Shadcn_ as SelectValue, } from 'ui' import { presets as defaultPresets } from './DataTable.constants' -import type { DatePreset } from './DataTable.types' +import type { DatePreset, DateRangeDisabled } from './DataTable.types' import { useDebounce } from './hooks/useDebounce' import { kdbClassName } from './primitives/Kbd' @@ -30,6 +30,7 @@ interface DatePickerWithRangeProps extends HTMLAttributes { date: DateRange | undefined setDate: (date: DateRange | undefined) => void presets?: DatePreset[] + dateRangeDisabled?: DateRangeDisabled } // [Joshen] This might be better placed in ui instead of DataTable since it could be reusable @@ -38,6 +39,7 @@ export function DatePickerWithRange({ date, setDate, presets = defaultPresets, + dateRangeDisabled, }: DatePickerWithRangeProps) { const [open, setOpen] = useState(false) @@ -98,6 +100,8 @@ export function DatePickerWithRange({ selected={date} onSelect={setDate} numberOfMonths={1} + // @ts-ignore + disabled={dateRangeDisabled} />
diff --git a/apps/studio/components/ui/DataTable/FilterSideBar.tsx b/apps/studio/components/ui/DataTable/FilterSideBar.tsx index 18216c9bf8bb2..86562753becf4 100644 --- a/apps/studio/components/ui/DataTable/FilterSideBar.tsx +++ b/apps/studio/components/ui/DataTable/FilterSideBar.tsx @@ -1,9 +1,14 @@ import { cn, ResizablePanel } from 'ui' +import { DateRangeDisabled } from './DataTable.types' import { DataTableFilterControls } from './DataTableFilters/DataTableFilterControls' import { DataTableResetButton } from './DataTableResetButton' import { useDataTable } from './providers/DataTableProvider' -export function FilterSideBar() { +interface FilterSideBarProps { + dateRangeDisabled?: DateRangeDisabled +} + +export function FilterSideBar({ dateRangeDisabled }: FilterSideBarProps) { const { table } = useDataTable() return ( @@ -26,7 +31,7 @@ export function FilterSideBar() {
- +
) diff --git a/apps/studio/data/fdw/fdw-create-mutation.ts b/apps/studio/data/fdw/fdw-create-mutation.ts index c2d203dca6aa5..c096235042105 100644 --- a/apps/studio/data/fdw/fdw-create-mutation.ts +++ b/apps/studio/data/fdw/fdw-create-mutation.ts @@ -20,7 +20,8 @@ export type FDWCreateVariables = { formState: { [k: string]: string } - mode: 'tables' | 'schema' + // If mode is skip, the wrapper will skip the last step, binding the schema/tables to foreign data. This could be done later. + mode: 'tables' | 'schema' | 'skip' tables: any[] sourceSchema: string targetSchema: string @@ -43,9 +44,9 @@ export function getCreateFDWSql({ .join('\n') const createWrapperSql = /* SQL */ ` - create foreign data wrapper ${formState.wrapper_name} - handler ${wrapperMeta.handlerName} - validator ${wrapperMeta.validatorName}; + create foreign data wrapper "${formState.wrapper_name}" + handler "${wrapperMeta.handlerName}" + validator "${wrapperMeta.validatorName}"; ` const encryptedOptions = wrapperMeta.server.options.filter((option) => option.encrypted) @@ -170,7 +171,7 @@ export function getCreateFDWSql({ .join('\n')} execute format( - E'create server ${formState.server_name} foreign data wrapper ${formState.wrapper_name} options (${optionsSqlArray});', + E'create server "${formState.server_name}" foreign data wrapper "${formState.wrapper_name}" options (${optionsSqlArray});', ${encryptedOptions .filter((option) => formState[option.name]) .map((option) => `v_${option.name}`) @@ -233,7 +234,7 @@ export async function createFDW({ projectRef, connectionString, ...rest }: FDWCr return result } -type FDWCreateData = Awaited> +export type FDWCreateData = Awaited> export const useFDWCreateMutation = ({ onSuccess, diff --git a/apps/studio/data/fdw/fdw-import-foreign-schema-mutation.ts b/apps/studio/data/fdw/fdw-import-foreign-schema-mutation.ts new file mode 100644 index 0000000000000..81d257b57e461 --- /dev/null +++ b/apps/studio/data/fdw/fdw-import-foreign-schema-mutation.ts @@ -0,0 
+1,78 @@ +import { useMutation, UseMutationOptions, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' + +import { entityTypeKeys } from 'data/entity-types/keys' +import { foreignTableKeys } from 'data/foreign-tables/keys' +import { executeSql } from 'data/sql/execute-sql-query' +import { wrapWithTransaction } from 'data/sql/utils/transaction' +import { vaultSecretsKeys } from 'data/vault/keys' +import type { ResponseError } from 'types' +import { fdwKeys } from './keys' + +export type FDWImportForeignSchemaVariables = { + projectRef?: string + connectionString?: string | null + serverName: string + sourceSchema: string + targetSchema: string +} + +export function getImportForeignSchemaSql({ + serverName, + sourceSchema, + targetSchema, +}: Pick) { + const sql = /* SQL */ ` + import foreign schema "${sourceSchema}" from server "${serverName}" into "${targetSchema}"; +` + return sql +} + +export async function importForeignSchema({ + projectRef, + connectionString, + ...rest +}: FDWImportForeignSchemaVariables) { + const sql = wrapWithTransaction(getImportForeignSchemaSql(rest)) + const { result } = await executeSql({ projectRef, connectionString, sql }) + return result +} + +type ImportForeignSchemaData = Awaited> + +export const useFDWImportForeignSchemaMutation = ({ + onSuccess, + onError, + ...options +}: Omit< + UseMutationOptions, + 'mutationFn' +> = {}) => { + const queryClient = useQueryClient() + + return useMutation( + (vars) => importForeignSchema(vars), + { + async onSuccess(data, variables, context) { + const { projectRef } = variables + + await Promise.all([ + queryClient.invalidateQueries(fdwKeys.list(projectRef), { refetchType: 'all' }), + queryClient.invalidateQueries(entityTypeKeys.list(projectRef)), + queryClient.invalidateQueries(foreignTableKeys.list(projectRef)), + queryClient.invalidateQueries(vaultSecretsKeys.list(projectRef)), + ]) + + await onSuccess?.(data, variables, context) + }, + async onError(data, variables,
context) { + if (onError === undefined) { + toast.error(`Failed to import schema for foreign data wrapper: ${data.message}`) + } else { + onError(data, variables, context) + } + }, + ...options, + } + ) +} diff --git a/apps/studio/data/storage/bucket-create-mutation.ts b/apps/studio/data/storage/bucket-create-mutation.ts index 81dc63d8b17d0..adfde525a43d1 100644 --- a/apps/studio/data/storage/bucket-create-mutation.ts +++ b/apps/studio/data/storage/bucket-create-mutation.ts @@ -6,12 +6,9 @@ import { handleError, post } from 'data/fetchers' import type { ResponseError } from 'types' import { storageKeys } from './keys' -export type BucketCreateVariables = { +export type BucketCreateVariables = Omit & { projectRef: string - id: string isPublic: boolean - file_size_limit: number | null - allowed_mime_types: string[] | null } type CreateStorageBucketBody = components['schemas']['CreateStorageBucketBody'] @@ -19,6 +16,7 @@ type CreateStorageBucketBody = components['schemas']['CreateStorageBucketBody'] export async function createBucket({ projectRef, id, + type, isPublic, file_size_limit, allowed_mime_types, @@ -26,13 +24,15 @@ export async function createBucket({ if (!projectRef) throw new Error('projectRef is required') if (!id) throw new Error('Bucket name is required') - const payload: Partial = { id, public: isPublic } - if (file_size_limit) payload.file_size_limit = file_size_limit - if (allowed_mime_types) payload.allowed_mime_types = allowed_mime_types + const payload: CreateStorageBucketBody = { id, type, public: isPublic } + if (type === 'STANDARD') { + if (file_size_limit) payload.file_size_limit = file_size_limit + if (allowed_mime_types) payload.allowed_mime_types = allowed_mime_types + } const { data, error } = await post('/platform/storage/{ref}/buckets', { params: { path: { ref: projectRef } }, - body: payload as CreateStorageBucketBody, + body: payload, }) if (error) handleError(error) diff --git a/apps/studio/data/storage/bucket-delete-mutation.ts 
b/apps/studio/data/storage/bucket-delete-mutation.ts index 2333e5839a924..a623863551db1 100644 --- a/apps/studio/data/storage/bucket-delete-mutation.ts +++ b/apps/studio/data/storage/bucket-delete-mutation.ts @@ -3,25 +3,29 @@ import { toast } from 'sonner' import { del, handleError, post } from 'data/fetchers' import type { ResponseError } from 'types' +import { BucketType } from './buckets-query' import { storageKeys } from './keys' export type BucketDeleteVariables = { projectRef: string id: string + type: BucketType } -export async function deleteBucket({ projectRef, id }: BucketDeleteVariables) { +export async function deleteBucket({ projectRef, id, type }: BucketDeleteVariables) { if (!projectRef) throw new Error('projectRef is required') if (!id) throw new Error('Bucket name is requried') - const { error: emptyBucketError } = await post('/platform/storage/{ref}/buckets/{id}/empty', { - params: { path: { ref: projectRef, id } }, - }) - if (emptyBucketError) handleError(emptyBucketError) + if (type !== 'ANALYTICS') { + const { error: emptyBucketError } = await post('/platform/storage/{ref}/buckets/{id}/empty', { + params: { path: { ref: projectRef, id } }, + }) + if (emptyBucketError) handleError(emptyBucketError) + } const { data, error: deleteBucketError } = await del('/platform/storage/{ref}/buckets/{id}', { - params: { path: { ref: projectRef, id } }, - }) + params: { path: { ref: projectRef, id }, query: { type } }, + } as any) if (deleteBucketError) handleError(deleteBucketError) return data } diff --git a/apps/studio/data/storage/buckets-query.ts b/apps/studio/data/storage/buckets-query.ts index 1684a4ca86b97..e26887d1a8fe9 100644 --- a/apps/studio/data/storage/buckets-query.ts +++ b/apps/studio/data/storage/buckets-query.ts @@ -1,5 +1,6 @@ import { useQuery, UseQueryOptions } from '@tanstack/react-query' +import { components } from 'api-types' import { get, handleError } from 'data/fetchers' import { useSelectedProject } from 
'hooks/misc/useSelectedProject' import { PROJECT_STATUS } from 'lib/constants' @@ -8,16 +9,9 @@ import { storageKeys } from './keys' export type BucketsVariables = { projectRef?: string } -export type Bucket = { - id: string - name: string - owner: string - public: boolean - created_at: string - updated_at: string - file_size_limit: null | number - allowed_mime_types: null | string[] -} +export type Bucket = components['schemas']['StorageBucketResponse'] + +export type BucketType = Bucket['type'] export async function getBuckets({ projectRef }: BucketsVariables, signal?: AbortSignal) { if (!projectRef) throw new Error('projectRef is required') diff --git a/apps/studio/data/storage/iceberg-namespace-create-mutation.ts b/apps/studio/data/storage/iceberg-namespace-create-mutation.ts new file mode 100644 index 0000000000000..58f93dd4bd4ab --- /dev/null +++ b/apps/studio/data/storage/iceberg-namespace-create-mutation.ts @@ -0,0 +1,89 @@ +import { useMutation, UseMutationOptions, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' + +import { constructHeaders, fetchHandler, handleError } from 'data/fetchers' +import type { ResponseError } from 'types' +import { storageKeys } from './keys' + +type CreateIcebergNamespaceVariables = { + catalogUri: string + warehouse: string + token: string + namespace: string +} + +async function createIcebergNamespace({ + catalogUri, + warehouse, + token, + namespace, +}: CreateIcebergNamespaceVariables) { + const headers = await constructHeaders({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }) + + const url = `${catalogUri}/v1/${warehouse}/namespaces`.replaceAll(/(?> + +export const useIcebergNamespaceCreateMutation = ({ + onSuccess, + onError, + ...options +}: Omit< + UseMutationOptions, + 'mutationFn' +> = {}) => { + const queryClient = useQueryClient() + + return useMutation( + (vars) => createIcebergNamespace(vars), + { + async onSuccess(data, variables, context) { + await 
queryClient.invalidateQueries( + storageKeys.icebergNamespace( + variables.catalogUri, + variables.warehouse, + variables.namespace + ) + ) + await onSuccess?.(data, variables, context) + }, + async onError(data, variables, context) { + if ((data.message === 'Request failed with status code 409')) { + toast.error(`A namespace named ${variables.namespace} already exists in the catalog.`) + return + } + if (onError === undefined) { + toast.error(`Failed to create Iceberg namespace: ${data.message}`) + } else { + onError(data, variables, context) + } + }, + ...options, + } + ) +} diff --git a/apps/studio/data/storage/iceberg-namespace-exists-query.ts b/apps/studio/data/storage/iceberg-namespace-exists-query.ts new file mode 100644 index 0000000000000..e1a4a026bfb9e --- /dev/null +++ b/apps/studio/data/storage/iceberg-namespace-exists-query.ts @@ -0,0 +1,54 @@ +import { UseQueryOptions, useQuery } from '@tanstack/react-query' + +import { constructHeaders, fetchHandler, handleError } from 'data/fetchers' +import type { ResponseError } from 'types' +import { storageKeys } from './keys' + +type ExistsNamespaceVariables = { + catalogUri: string + warehouse: string + token: string + namespace: string +} + +async function checkNamespaceExists({ + catalogUri, + warehouse, + token, + namespace, +}: ExistsNamespaceVariables) { + const headers = await constructHeaders({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }) + + const url = `${catalogUri}/v1/${warehouse}/namespaces/${namespace}`.replaceAll(/(?> + +export type IcebergNamespaceExistsError = ResponseError + +export const useIcebergNamespaceExistsQuery = ( + params: ExistsNamespaceVariables, + { + ...options + }: UseQueryOptions = {} +) => { + return useQuery( + storageKeys.icebergNamespace(params.catalogUri, params.warehouse, params.namespace), + () => checkNamespaceExists(params), + { ...options } + ) +} diff --git a/apps/studio/data/storage/iceberg-namespace-tables-query.ts
b/apps/studio/data/storage/iceberg-namespace-tables-query.ts new file mode 100644 index 0000000000000..dd52d964d9913 --- /dev/null +++ b/apps/studio/data/storage/iceberg-namespace-tables-query.ts @@ -0,0 +1,65 @@ +import { UseQueryOptions, useQuery } from '@tanstack/react-query' + +import { constructHeaders, fetchHandler, handleError } from 'data/fetchers' +import type { ResponseError } from 'types' +import { storageKeys } from './keys' + +type GetNamespaceTablesVariables = { + catalogUri: string + warehouse: string + token: string + namespace: string +} + +async function getNamespaceTables({ + catalogUri, + warehouse, + token, + namespace, +}: GetNamespaceTablesVariables) { + const headers = await constructHeaders({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }) + + const url = `${catalogUri}/v1/${warehouse}/namespaces/${namespace}/tables`.replaceAll( + /(? i.name) + } catch (error) { + handleError(error) + } +} + +type IcebergNamespaceTablesData = Awaited> + +export type IcebergNamespaceTablesError = ResponseError + +export const useIcebergNamespaceTablesQuery = ( + params: GetNamespaceTablesVariables, + { + ...options + }: UseQueryOptions = {} +) => { + return useQuery( + storageKeys.icebergNamespaceTables(params.catalogUri, params.warehouse, params.namespace), + () => getNamespaceTables(params), + { ...options } + ) +} diff --git a/apps/studio/data/storage/iceberg-namespaces-query.ts b/apps/studio/data/storage/iceberg-namespaces-query.ts new file mode 100644 index 0000000000000..8ae16d3446560 --- /dev/null +++ b/apps/studio/data/storage/iceberg-namespaces-query.ts @@ -0,0 +1,54 @@ +import { UseQueryOptions, useQuery } from '@tanstack/react-query' + +import { constructHeaders, fetchHandler, handleError } from 'data/fetchers' +import type { ResponseError } from 'types' +import { storageKeys } from './keys' + +type GetNamespacesVariables = { + catalogUri: string + warehouse: string + token: string +} + +async function 
getNamespaces({ catalogUri, warehouse, token }: GetNamespacesVariables) { + const headers = await constructHeaders({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }) + + const url = `${catalogUri}/v1/${warehouse}/namespaces`.replaceAll(/(?> + +export type IcebergNamespacesError = ResponseError + +export const useIcebergNamespacesQuery = ( + params: GetNamespacesVariables, + { ...options }: UseQueryOptions = {} +) => { + return useQuery( + storageKeys.icebergNamespaces(params.catalogUri, params.warehouse), + () => getNamespaces(params), + { ...options } + ) +} diff --git a/apps/studio/data/storage/iceberg-wrapper-create-mutation.ts b/apps/studio/data/storage/iceberg-wrapper-create-mutation.ts new file mode 100644 index 0000000000000..e82f77ca5370b --- /dev/null +++ b/apps/studio/data/storage/iceberg-wrapper-create-mutation.ts @@ -0,0 +1,80 @@ +import { PermissionAction } from '@supabase/shared-types/out/constants' + +import { WRAPPERS } from 'components/interfaces/Integrations/Wrappers/Wrappers.constants' +import { + useIsProjectActive, + useProjectContext, +} from 'components/layouts/ProjectLayout/ProjectContext' +import { + getCatalogURI, + getConnectionURL, +} from 'components/to-be-cleaned/Storage/StorageSettings/StorageSettings.utils' +import { useProjectSettingsV2Query } from 'data/config/project-settings-v2-query' +import { useProjectStorageConfigQuery } from 'data/config/project-storage-config-query' +import { FDWCreateVariables, useFDWCreateMutation } from 'data/fdw/fdw-create-mutation' +import { useCheckPermissions } from 'hooks/misc/useCheckPermissions' +import { snakeCase } from 'lodash' +import { useS3AccessKeyCreateMutation } from './s3-access-key-create-mutation' + +export const useIcebergWrapperCreateMutation = () => { + const { project } = useProjectContext() + const { data: settings } = useProjectSettingsV2Query({ projectRef: project?.ref }) + const protocol = settings?.app_config?.protocol ?? 
'https' + const endpoint = settings?.app_config?.endpoint + + const serviceApiKey = + (settings?.service_api_keys ?? []).find((key) => key.tags === 'service_role')?.api_key ?? + 'SUPABASE_CLIENT_SERVICE_KEY' + + const wrapperMeta = WRAPPERS.find((wrapper) => wrapper.name === 'iceberg_wrapper') + + const isProjectActive = useIsProjectActive() + + const canCreateCredentials = useCheckPermissions(PermissionAction.STORAGE_ADMIN_WRITE, '*') + + const { data: config } = useProjectStorageConfigQuery({ projectRef: project?.ref }) + const isS3ConnectionEnabled = config?.features.s3Protocol.enabled + const disableCreation = !isProjectActive || !canCreateCredentials || !isS3ConnectionEnabled + + const { mutateAsync: createS3AccessKey, isLoading: isCreatingS3AccessKey } = + useS3AccessKeyCreateMutation() + + const { mutateAsync: createFDW, isLoading: isCreatingFDW } = useFDWCreateMutation() + + const mutateAsync = async ({ bucketName }: { bucketName: string }) => { + const createS3KeyData = await createS3AccessKey({ + projectRef: project?.ref, + description: `${snakeCase(bucketName)}_keys`, + }) + + const wrapperName = `${snakeCase(bucketName)}_fdw` + + const params: FDWCreateVariables = { + projectRef: project?.ref, + connectionString: project?.connectionString, + wrapperMeta: wrapperMeta!, + formState: { + wrapper_name: wrapperName, + server_name: `${wrapperName}_server`, + vault_aws_access_key_id: createS3KeyData?.access_key, + vault_aws_secret_access_key: createS3KeyData?.secret_key, + vault_token: serviceApiKey, + warehouse: bucketName, + 's3.endpoint': getConnectionURL(project?.ref ?? '', protocol, endpoint), + catalog_uri: getCatalogURI(project?.ref ?? 
'', protocol, endpoint), + }, + mode: 'skip', + tables: [], + sourceSchema: '', + targetSchema: '', + } + + await createFDW(params) + } + + return { + mutateAsync, + isLoading: isCreatingFDW || isCreatingS3AccessKey, + hasPermission: canCreateCredentials, + } +} diff --git a/apps/studio/data/storage/keys.ts b/apps/studio/data/storage/keys.ts index 5fa399773f44c..e68911d547c71 100644 --- a/apps/studio/data/storage/keys.ts +++ b/apps/studio/data/storage/keys.ts @@ -1,4 +1,10 @@ export const storageKeys = { buckets: (projectRef: string | undefined) => ['projects', projectRef, 'buckets'] as const, archive: (projectRef: string | undefined) => ['projects', projectRef, 'archive'] as const, + icebergNamespaces: (catalog: string, warehouse: string) => + ['catalog', catalog, 'warehouse', warehouse, 'namespaces'] as const, + icebergNamespace: (catalog: string, warehouse: string, namespace: string) => + ['catalog', catalog, 'warehouse', warehouse, 'namespaces', namespace] as const, + icebergNamespaceTables: (catalog: string, warehouse: string, namespace: string) => + ['catalog', catalog, 'warehouse', warehouse, 'namespaces', namespace, 'tables'] as const, } diff --git a/apps/studio/data/vault/vault-secret-decrypted-value-query.ts b/apps/studio/data/vault/vault-secret-decrypted-value-query.ts index b7009f02165c6..7b1235c206def 100644 --- a/apps/studio/data/vault/vault-secret-decrypted-value-query.ts +++ b/apps/studio/data/vault/vault-secret-decrypted-value-query.ts @@ -1,5 +1,5 @@ -import { UseQueryOptions, useQuery } from '@tanstack/react-query' import { Query } from '@supabase/pg-meta/src/query' +import { UseQueryOptions, useQuery } from '@tanstack/react-query' import { executeSql } from '../sql/execute-sql-query' import { vaultSecretsKeys } from './keys' @@ -33,25 +33,26 @@ export const getDecryptedValue = async ( }, signal ) - return result + return result as { decrypted_secret: string }[] } +type getDecryptedValueResult = Awaited> export type VaultSecretsDecryptedValueData 
= string export type VaultSecretsDecryptedValueError = unknown -export const useVaultSecretDecryptedValueQuery = ( +export const useVaultSecretDecryptedValueQuery = ( { projectRef, connectionString, id }: VaultSecretsDecryptedValueVariables, { enabled = true, ...options - }: UseQueryOptions = {} + }: UseQueryOptions = {} ) => - useQuery( + useQuery( vaultSecretsKeys.getDecryptedValue(projectRef, id), ({ signal }) => getDecryptedValue({ projectRef, connectionString, id }, signal), { select(data) { - return (data[0] as any).decrypted_secret + return (data[0]?.decrypted_secret ?? '') as TData }, enabled: enabled && typeof projectRef !== 'undefined', ...options, diff --git a/apps/studio/pages/api/platform/projects/[ref]/analytics/endpoints/[name].ts b/apps/studio/pages/api/platform/projects/[ref]/analytics/endpoints/[name].ts index b18473d013a8a..1646996989fab 100644 --- a/apps/studio/pages/api/platform/projects/[ref]/analytics/endpoints/[name].ts +++ b/apps/studio/pages/api/platform/projects/[ref]/analytics/endpoints/[name].ts @@ -1,5 +1,5 @@ -import { NextApiRequest, NextApiResponse } from 'next' import apiWrapper from 'lib/api/apiWrapper' +import { NextApiRequest, NextApiResponse } from 'next' import { PROJECT_ANALYTICS_URL } from 'pages/api/constants' export default (req: NextApiRequest, res: NextApiResponse) => apiWrapper(req, res, handler) @@ -9,6 +9,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { switch (method) { case 'GET': + case 'POST': const missingEnvVars = [ process.env.LOGFLARE_PRIVATE_ACCESS_TOKEN ? null : 'LOGFLARE_PRIVATE_ACCESS_TOKEN', process.env.LOGFLARE_URL ? 
null : 'LOGFLARE_URL', @@ -23,15 +24,26 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { } default: - res.setHeader('Allow', ['GET']) + res.setHeader('Allow', ['GET', 'POST']) res.status(405).json({ data: null, error: { message: `Method ${method} Not Allowed` } }) } } const proxyRequest = async (req: NextApiRequest) => { const { name, ...toForward } = req.query - const payload = { ...toForward, project_tier: 'ENTERPRISE' } - const search = '?' + new URLSearchParams(payload as any).toString() + const project_tier = 'ENTERPRISE' + + if (req.method === 'GET') { + const payload = { ...toForward, project_tier } + return retrieveAnalyticsData(name as string, payload) + } else if (req.method === 'POST') { + const payload = { ...req.body, project_tier } + return retrieveAnalyticsData(name as string, payload) + } +} + +const retrieveAnalyticsData = async (name: string, payload: any) => { + const search = '?' + new URLSearchParams(payload).toString() const apiKey = process.env.LOGFLARE_PRIVATE_ACCESS_TOKEN const url = `${PROJECT_ANALYTICS_URL}endpoints/query/${name}${search}` const result = await fetch(url, { diff --git a/apps/studio/pages/project/[ref]/branches/merge-requests.tsx b/apps/studio/pages/project/[ref]/branches/merge-requests.tsx index 7aed9e003b4e4..fa7ae63947dcf 100644 --- a/apps/studio/pages/project/[ref]/branches/merge-requests.tsx +++ b/apps/studio/pages/project/[ref]/branches/merge-requests.tsx @@ -25,7 +25,7 @@ import { useGitHubConnectionsQuery } from 'data/integrations/github-connections- import { useCheckPermissions } from 'hooks/misc/useCheckPermissions' import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { useSelectedProject } from 'hooks/misc/useSelectedProject' -import { useFlag } from 'hooks/ui/useFlag' +import { useIsBranching2Enabled } from 'components/interfaces/App/FeaturePreview/FeaturePreviewContext' import type { NextPageWithLayout } from 'types' import { Button, @@ -42,7 +42,7 @@ const 
MergeRequestsPage: NextPageWithLayout = () => { const { ref } = useParams() const project = useSelectedProject() const selectedOrg = useSelectedOrganization() - const gitlessBranching = useFlag('gitlessBranching') + const gitlessBranching = useIsBranching2Enabled() const isBranch = project?.parent_project_ref !== undefined const projectRef = @@ -282,7 +282,7 @@ const MergeRequestsPageWrapper = ({ children }: PropsWithChildren<{}>) => { const router = useRouter() const { ref } = useParams() const project = useSelectedProject() - const gitlessBranching = useFlag('gitlessBranching') + const gitlessBranching = useIsBranching2Enabled() const isBranch = project?.parent_project_ref !== undefined const projectRef = diff --git a/apps/studio/pages/project/[ref]/merge.tsx b/apps/studio/pages/project/[ref]/merge.tsx index 2f8d10228cf9b..9cd7be109287b 100644 --- a/apps/studio/pages/project/[ref]/merge.tsx +++ b/apps/studio/pages/project/[ref]/merge.tsx @@ -1,13 +1,5 @@ import dayjs from 'dayjs' -import { - AlertTriangle, - ExternalLink, - GitBranchIcon, - GitMerge, - MoreVertical, - Shield, - X, -} from 'lucide-react' +import { AlertTriangle, GitBranchIcon, GitMerge, MoreVertical, Shield, X } from 'lucide-react' import Link from 'next/link' import { useRouter } from 'next/router' import { useCallback, useEffect, useMemo, useState } from 'react' @@ -33,7 +25,6 @@ import { useBranchesQuery } from 'data/branches/branches-query' import { useBranchMergeDiff } from 'hooks/branches/useBranchMergeDiff' import { useWorkflowManagement } from 'hooks/branches/useWorkflowManagement' import { useProjectByRef, useSelectedProject } from 'hooks/misc/useSelectedProject' -import { useFlag } from 'hooks/ui/useFlag' import type { NextPageWithLayout } from 'types' import { Badge, @@ -53,8 +44,6 @@ const MergePage: NextPageWithLayout = () => { const { ref } = useParams() const project = useSelectedProject() - const gitlessBranching = useFlag('gitlessBranching') - const [isSubmitting, setIsSubmitting] 
= useState(false) const [workflowFinalStatus, setWorkflowFinalStatus] = useState(null) const [showConfirmDialog, setShowConfirmDialog] = useState(false) @@ -314,33 +303,6 @@ const MergePage: NextPageWithLayout = () => { setWorkflowFinalStatus(null) }, [currentWorkflowRunId]) - if (!gitlessBranching) { - return ( - - -
- -

- The branch merge feature is currently in development and will be available soon. -

-
- -
-
-
-
-
- ) - } - // If not on a preview branch or branch info unavailable, show notice if (!isBranch || !currentBranch) { return ( @@ -349,7 +311,7 @@ const MergePage: NextPageWithLayout = () => {

- This page is only available for preview branches. + You can only review changes when on a preview branch

diff --git a/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx b/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx index 24e88ca1ade9b..39cf2671d820f 100644 --- a/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx +++ b/apps/studio/pages/project/[ref]/storage/buckets/[bucketId].tsx @@ -5,6 +5,7 @@ import { useProjectContext } from 'components/layouts/ProjectLayout/ProjectConte import StorageBucketsError from 'components/layouts/StorageLayout/StorageBucketsError' import StorageLayout from 'components/layouts/StorageLayout/StorageLayout' import { StorageExplorer } from 'components/to-be-cleaned/Storage' +import { AnalyticBucketDetails } from 'components/to-be-cleaned/Storage/AnalyticBucketDetails' import { useSelectedBucket } from 'components/to-be-cleaned/Storage/StorageExplorer/useSelectedBucket' import { useStorageExplorerStateSnapshot } from 'state/storage-explorer' import type { NextPageWithLayout } from 'types' @@ -18,7 +19,7 @@ const PageLayout: NextPageWithLayout = () => { if (!project || !projectRef) return null return ( -
+
{isError && } {isSuccess ? ( @@ -26,6 +27,8 @@ const PageLayout: NextPageWithLayout = () => {

Bucket {bucketId} cannot be found

+ ) : bucket.type === 'ANALYTICS' ? ( + ) : ( ) diff --git a/apps/studio/public/img/previews/branching-preview.png b/apps/studio/public/img/previews/branching-preview.png new file mode 100644 index 0000000000000..91b5435f1117a Binary files /dev/null and b/apps/studio/public/img/previews/branching-preview.png differ diff --git a/apps/www/_blog/2025-07-15-analytics-buckets.mdx b/apps/www/_blog/2025-07-15-analytics-buckets.mdx new file mode 100644 index 0000000000000..143ab2af2890d --- /dev/null +++ b/apps/www/_blog/2025-07-15-analytics-buckets.mdx @@ -0,0 +1,154 @@ +--- +title: 'Supabase Analytics Buckets with Iceberg Support' +description: 'Analytics buckets optimized for large-scale data analysis with Apache Iceberg support.' +categories: + - product + - launch-week +tags: + - launch-week + - storage +date: '2025-07-15:10:00' +toc_depth: 3 +author: oli_rice,fabrizio +image: launch-week-15/day-2-analytics-buckets/og.jpg +thumb: launch-week-15/day-2-analytics-buckets/thumb.png +launchweek: 15 +--- + +Today we're launching **Supabase Analytics Buckets** in private alpha. These are a new kind of storage bucket optimized for analytics, with built-in support for the [Apache Iceberg](https://iceberg.apache.org/) table format. + +Analytics buckets are integrated into Supabase Studio, power table-level views instead of raw files, and can be queried using the new **Supabase Iceberg Wrapper**, also launching in alpha. + +
+