diff --git a/apps/studio/components/interfaces/APIKeys/ApiKeyPill.tsx b/apps/studio/components/interfaces/APIKeys/ApiKeyPill.tsx index a7f16a5c8899b..6024d01772060 100644 --- a/apps/studio/components/interfaces/APIKeys/ApiKeyPill.tsx +++ b/apps/studio/components/interfaces/APIKeys/ApiKeyPill.tsx @@ -79,7 +79,7 @@ export function ApiKeyPill({ async function onCopy() { // If key is already revealed, use that value - if (data?.api_key) return data?.api_key + if (data?.api_key) return data?.api_key ?? '' try { // Fetch full key and immediately clear from cache after copying @@ -89,13 +89,15 @@ export function ApiKeyPill({ exact: true, }) - if (result.isSuccess) return result.data.api_key + if (result.isSuccess) return result.data.api_key ?? '' if (error) { toast.error('Failed to copy secret API key') + return '' } } catch (error) { console.error('Failed to fetch API key:', error) + return '' } // Fallback to the masked version if fetch fails diff --git a/apps/studio/components/interfaces/Reports/ReportChart.tsx b/apps/studio/components/interfaces/Reports/ReportChart.tsx new file mode 100644 index 0000000000000..8cfe89816f05d --- /dev/null +++ b/apps/studio/components/interfaces/Reports/ReportChart.tsx @@ -0,0 +1,58 @@ +/** + * ReportChart + * + * A wrapper component that uses the useChartData hook to fetch data for a chart + * and then passes the data and loading state to the ComposedChartHandler. + * + * This component acts as a bridge between the data-fetching logic and the + * presentational chart component. + */ +import ComposedChartHandler from 'components/ui/Charts/ComposedChartHandler' +import { useChartData } from 'hooks/useChartData' +import type { UpdateDateRange } from 'pages/project/[ref]/reports/database' +import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils' + +const ReportChart = ({ + chart, + startDate, + endDate, + interval, + updateDateRange, +}: { + chart: any + startDate: string + endDate: string + interval: string + updateDateRange: UpdateDateRange +}) => { + const { + data, + isLoading: isLoading, + chartAttributes, + highlightedValue, + } = useChartData({ + attributes: chart.attributes, + startDate, + endDate, + interval, + data: undefined, + highlightedValue: + chart.id === 'client-connections' || chart.id === 'pgbouncer-connections' + ? true + : chart.showMaxValue, + }) + + return ( + 0 ? chartAttributes : chart.attributes) as MultiAttribute[] + } + data={data} + isLoading={isLoading} + highlightedValue={highlightedValue as any} + updateDateRange={updateDateRange} + /> + ) +} +export default ReportChart diff --git a/apps/studio/components/interfaces/Settings/General/Infrastructure/ProjectUpgradeAlert/ProjectUpgradeAlert.tsx b/apps/studio/components/interfaces/Settings/General/Infrastructure/ProjectUpgradeAlert/ProjectUpgradeAlert.tsx index 5f485beaebfec..c3ff46c0ac511 100644 --- a/apps/studio/components/interfaces/Settings/General/Infrastructure/ProjectUpgradeAlert/ProjectUpgradeAlert.tsx +++ b/apps/studio/components/interfaces/Settings/General/Infrastructure/ProjectUpgradeAlert/ProjectUpgradeAlert.tsx @@ -187,6 +187,7 @@ const ProjectUpgradeAlert = () => { [right-sized](https://supabase.com/docs/guides/platform/upgrading#disk-sizing) with the upgrade.`} /> )} + {/* @ts-ignore */} {(data?.potential_breaking_changes ?? 
[]).length > 0 && ( diff --git a/apps/studio/components/interfaces/Settings/Infrastructure/InfrastructureInfo.tsx b/apps/studio/components/interfaces/Settings/Infrastructure/InfrastructureInfo.tsx index 6cb810871b55e..ca84227d92f5b 100644 --- a/apps/studio/components/interfaces/Settings/Infrastructure/InfrastructureInfo.tsx +++ b/apps/studio/components/interfaces/Settings/Infrastructure/InfrastructureInfo.tsx @@ -19,7 +19,6 @@ import { AlertTitle_Shadcn_, Alert_Shadcn_, Badge, - Button, Input, Tooltip, TooltipContent, @@ -27,6 +26,13 @@ import { } from 'ui' import { ProjectUpgradeAlert } from '../General/Infrastructure/ProjectUpgradeAlert' import InstanceConfiguration from './InfrastructureConfiguration/InstanceConfiguration' +import { + DatabaseExtensionsWarning, + ObjectsToBeDroppedWarning, + ReadReplicasWarning, + UnsupportedExtensionsWarning, + UserDefinedObjectsInInternalSchemasWarning, +} from './UpgradeWarnings' const InfrastructureInfo = () => { const { ref } = useParams() @@ -73,6 +79,12 @@ const InfrastructureInfo = () => { const isInactive = project?.status === 'INACTIVE' const hasReadReplicas = (databases ?? []).length > 1 + // @ts-ignore [Bobbie] to be removed after 2025-06-30 prod deploy + const hasExtensionDependentObjects = (data?.extension_dependent_objects ?? []).length > 0 + const hasObjectsToBeDropped = (data?.objects_to_be_dropped ?? []).length > 0 + const hasUnsupportedExtensions = (data?.unsupported_extensions || []).length > 0 + const hasObjectsInternalSchema = (data?.user_defined_objects_in_internal_schemas || []).length > 0 + return ( <> @@ -181,149 +193,38 @@ const InfrastructureInfo = () => { )} - {data?.eligible && !hasReadReplicas && } - {data.eligible && hasReadReplicas && ( - - - A new version of Postgres is available for your project - - - You will need to remove all read replicas prior to upgrading your Postgres - version to the latest available ({latestPgVersion}). - - - )} - {/* TODO(bobbie): once extension_dependent_objects is removed on the backend, remove this block and the ts-ignores below */} - {!data?.eligible && (data?.extension_dependent_objects || []).length > 0 && ( - - - A new version of Postgres is available - - -
-

- You'll need to remove the following extensions before upgrading: -

- -
-                <ul className="pl-4">
-                  {(data?.extension_dependent_objects || []).map((obj) => (
-                    <li key={obj}>• {obj}</li>
-                  ))}
-                </ul>
-                <p>
-                  {projectUpgradeEligibilityData?.potential_breaking_changes?.includes(
-                    'pg17_upgrade_unsupported_extensions'
-                  )
-                    ? 'These extensions are not supported in newer versions of Supabase Postgres. If you are not using them, it is safe to remove them.'
-                    : 'Check the docs for which ones might need to be removed.'}
-                </p>
-              </AlertDescription_Shadcn_>
-            </Alert_Shadcn_>
- )} - {!data?.eligible && - // @ts-ignore - (data?.objects_to_be_dropped || []).length > 0 && ( - - - A new version of Postgres is available - - -
-

- You'll need to remove the following objects before upgrading: -

- -
-                <ul className="pl-4">
-                  {
-                    // @ts-ignore
-                    (data?.objects_to_be_dropped || []).map((obj: string) => (
-                      <li key={obj}>• {obj}</li>
-                    ))
-                  }
-                </ul>
-                <p>Check the docs for which objects need to be removed.</p>
-              </AlertDescription_Shadcn_>
-            </Alert_Shadcn_>
- )} - {!data?.eligible && - // @ts-ignore - (data?.unsupported_extensions || []).length > 0 && ( - - - A new version of Postgres is available - - -
-

- You'll need to remove the following extensions before upgrading: -

+ {data.eligible ? ( + hasReadReplicas ? ( + + ) : ( + + ) + ) : null} -
-                  <ul className="pl-4">
-                    {
-                      // @ts-ignore
-                      (data?.unsupported_extensions || []).map((obj: string) => (
-                        <li key={obj}>• {obj}</li>
-                      ))
-                    }
-                  </ul>
-                  <p>
-                    These extensions are not supported in newer versions of Supabase Postgres. If
-                    you are not using them, it is safe to remove them.
-                  </p>
-                </AlertDescription_Shadcn_>
-              </Alert_Shadcn_>
- )} + {!data.eligible ? ( + hasExtensionDependentObjects ? ( + + ) : hasObjectsToBeDropped ? ( + + ) : hasUnsupportedExtensions ? ( + + ) : hasObjectsInternalSchema ? ( + + ) : null + ) : null} )} diff --git a/apps/studio/components/interfaces/Settings/Infrastructure/UpgradeWarnings.tsx b/apps/studio/components/interfaces/Settings/Infrastructure/UpgradeWarnings.tsx new file mode 100644 index 0000000000000..4f4e9d8679a08 --- /dev/null +++ b/apps/studio/components/interfaces/Settings/Infrastructure/UpgradeWarnings.tsx @@ -0,0 +1,185 @@ +import { Alert_Shadcn_, AlertDescription_Shadcn_, AlertTitle_Shadcn_, Button } from 'ui' + +export const ReadReplicasWarning = ({ latestPgVersion }: { latestPgVersion: string }) => { + return ( + + + A new version of Postgres is available for your project + + + You will need to remove all read replicas prior to upgrading your Postgres version to the + latest available ({latestPgVersion}). + + + ) +} + +export const DatabaseExtensionsWarning = ({ + extensions, + potentialBreakingChanges, +}: { + extensions: string[] + potentialBreakingChanges?: string[] +}) => { + return ( + + A new version of Postgres is available + +
+

You'll need to remove the following extensions before upgrading:

+ +
+          <ul className="pl-4">
+            {extensions.map((obj) => (
+              <li key={obj}>• {obj}</li>
+            ))}
+          </ul>
+          <p>
+            {potentialBreakingChanges?.includes('pg17_upgrade_unsupported_extensions')
+              ? 'These extensions are not supported in newer versions of Supabase Postgres. If you are not using them, it is safe to remove them.'
+              : 'Check the docs for which ones might need to be removed.'}
+          </p>
+        </AlertDescription_Shadcn_>
+      </Alert_Shadcn_>
+ ) +} + +export const ObjectsToBeDroppedWarning = ({ + objectsToBeDropped, +}: { + objectsToBeDropped: string[] +}) => { + return ( + + A new version of Postgres is available + +
+

You'll need to remove the following objects before upgrading:

+ +
+          <ul className="pl-4">
+            {objectsToBeDropped.map((obj) => (
+              <li key={obj}>• {obj}</li>
+            ))}
+          </ul>
+          <p>Check the docs for which objects need to be removed.</p>
+        </AlertDescription_Shadcn_>
+      </Alert_Shadcn_>
+ ) +} + +export const UnsupportedExtensionsWarning = ({ + unsupportedExtensions, +}: { + unsupportedExtensions: string[] +}) => { + return ( + + A new version of Postgres is available + +
+

You'll need to remove the following extensions before upgrading:

+ +
+          <ul className="pl-4">
+            {unsupportedExtensions.map((obj: string) => (
+              <li key={obj}>• {obj}</li>
+            ))}
+          </ul>
+          <p>
+            These extensions are not supported in newer versions of Supabase Postgres. If you are
+            not using them, it is safe to remove them.
+          </p>
+        </AlertDescription_Shadcn_>
+      </Alert_Shadcn_>
+ ) +} + +export const UserDefinedObjectsInInternalSchemasWarning = ({ objects }: { objects: string[] }) => { + return ( + + A new version of Postgres is available + +
+

+ You'll need to move these objects out of auth/realtime/storage schemas before upgrading: +

+ +
+          <ul className="pl-4">
+            {objects.map((obj: string) => (
+              <li key={obj}>• {obj}</li>
+            ))}
+          </ul>
+          <p>
+            These schemas are Supabase-managed and creating custom objects in them is no longer
+            supported. Check the changelog to see how to move them to your own schemas.
+          </p>
+        </AlertDescription_Shadcn_>
+      </Alert_Shadcn_>
+ ) +} diff --git a/apps/studio/components/layouts/ReportsLayout/ReportsMenu.tsx b/apps/studio/components/layouts/ReportsLayout/ReportsMenu.tsx index ce1e4ab126792..1afb9f51a7a94 100644 --- a/apps/studio/components/layouts/ReportsLayout/ReportsMenu.tsx +++ b/apps/studio/components/layouts/ReportsLayout/ReportsMenu.tsx @@ -18,6 +18,7 @@ import { useProfile } from 'lib/profile' import { Menu, cn } from 'ui' import ConfirmationModal from 'ui-patterns/Dialogs/ConfirmationModal' import { ReportMenuItem } from './ReportMenuItem' +import { useFlag } from 'hooks/ui/useFlag' const ReportsMenu = () => { const router = useRouter() @@ -25,6 +26,7 @@ const ReportsMenu = () => { const { ref, id } = useParams() const pageKey = (id || router.pathname.split('/')[4]) as string const storageEnabled = useIsFeatureEnabled('project_storage:all') + const authEnabled = useFlag('authreportv2') const canCreateCustomReport = useCheckPermissions(PermissionAction.CREATE, 'user_content', { resource: { type: 'report', owner_id: profile?.id }, @@ -97,21 +99,29 @@ const ReportsMenu = () => { key: 'api-overview', url: `/project/${ref}/reports/api-overview`, }, - ...(storageEnabled + ...(authEnabled ? [ { - name: 'Storage', - key: 'storage', - url: `/project/${ref}/reports/storage`, + name: 'Auth', + key: 'auth', + url: `/project/${ref}/reports/auth`, }, ] : []), - { name: 'Database', key: 'database', url: `/project/${ref}/reports/database`, }, + ...(storageEnabled + ? [ + { + name: 'Storage', + key: 'storage', + url: `/project/${ref}/reports/storage`, + }, + ] + : []), ], }, ] diff --git a/apps/studio/components/ui/Charts/ComposedChart.tsx b/apps/studio/components/ui/Charts/ComposedChart.tsx index 0c36f49f72e14..a1d501fd65552 100644 --- a/apps/studio/components/ui/Charts/ComposedChart.tsx +++ b/apps/studio/components/ui/Charts/ComposedChart.tsx @@ -28,13 +28,9 @@ import { } from './Charts.constants' import { CommonChartProps, Datum } from './Charts.types' import { numberFormatter, useChartSize } from './Charts.utils' -import { - calculateTotalChartAggregate, - CustomLabel, - CustomTooltip, - type MultiAttribute, -} from './ComposedChart.utils' +import { calculateTotalChartAggregate, CustomLabel, CustomTooltip } from './ComposedChart.utils' import NoDataPlaceholder from './NoDataPlaceholder' +import { MultiAttribute } from './ComposedChart.utils' import { ChartHighlight } from './useChartHighlight' import { formatBytes } from 'lib/helpers' @@ -58,6 +54,7 @@ export interface ComposedChartProps extends CommonChartProps { chartStyle?: string onChartStyleChange?: (style: string) => void updateDateRange: any + titleTooltip?: string hideYAxis?: boolean hideHighlightedValue?: boolean syncId?: string @@ -114,6 +111,22 @@ export default function ComposedChart({ const { Container } = useChartSize(size) + const day = (value: number | string) => (displayDateInUtc ? dayjs(value).utc() : dayjs(value)) + + const formatTimestamp = (ts: unknown) => { + if (typeof ts !== 'number' && typeof ts !== 'string') { + return '' + } + + // Timestamps from auth logs can be in microseconds + if (typeof ts === 'number' && ts > 1e14) { + return day(ts / 1000).format(customDateFormat) + } + + // dayjs can handle ISO strings and millisecond numbers + return day(ts).format(customDateFormat) + } + // Default props const _XAxisProps = XAxisProps || { interval: data.length - 2, @@ -127,18 +140,19 @@ export default function ComposedChart({ width: 0, } - const day = (value: number | string) => (displayDateInUtc ? 
dayjs(value).utc() : dayjs(value)) - function getHeaderLabel() { if (!xAxisIsDate) { if (!focusDataIndex) return highlightedLabel - return data[focusDataIndex]?.timestamp + return data[focusDataIndex]?.[xAxisKey] } return ( (focusDataIndex !== null && data && data[focusDataIndex] !== undefined && - day(data[focusDataIndex].timestamp).format(customDateFormat)) || + (() => { + const ts = data[focusDataIndex][xAxisKey] + return formatTimestamp(ts) + })()) || highlightedLabel ) } @@ -149,13 +163,45 @@ export default function ComposedChart({ color: CHART_COLORS.REFERENCE_LINE, } + const chartData = + data && !!data[0] + ? Object.entries(data[0]) + ?.map(([key, value]) => ({ + name: key, + value: value, + })) + .filter( + (att) => + att.name !== 'timestamp' && + att.name !== 'period_start' && + att.name !== maxAttribute?.attribute && + attributes.some((attr) => attr.attribute === att.name && attr.enabled !== false) + ) + .map((att, index) => { + const attribute = attributes.find((attr) => attr.attribute === att.name) + return { + ...att, + color: attribute?.color + ? resolvedTheme?.includes('dark') + ? attribute.color.dark + : attribute.color.light + : STACKED_CHART_COLORS[index % STACKED_CHART_COLORS.length], + } + }) + : [] + const lastDataPoint = !!data[data.length - 1] ? Object.entries(data[data.length - 1]) .map(([key, value]) => ({ dataKey: key, value: value as number, })) - .filter((entry) => entry.dataKey !== 'timestamp') + .filter( + (entry) => + entry.dataKey !== 'timestamp' && + entry.dataKey !== 'period_start' && + attributes.some((attr) => attr.attribute === entry.dataKey && attr.enabled !== false) + ) : undefined const referenceLines = attributes.filter((attribute) => attribute?.provider === 'reference-line') @@ -184,32 +230,17 @@ export default function ComposedChart({ chartHighlight?.coordinates.right && chartHighlight?.coordinates.left !== chartHighlight?.coordinates.right - const chartData = - data && !!data[0] - ? Object.entries(data[0]) - ?.map(([key, value], index) => ({ - name: key, - value: value, - color: STACKED_CHART_COLORS[index - (1 % STACKED_CHART_COLORS.length)], - })) - .filter( - (att) => - att.name !== 'timestamp' && - att.name !== maxAttribute?.attribute && - !referenceLines.map((a) => a.attribute).includes(att.name) - ) - : [] - const stackedAttributes = chartData.filter((att) => !att.name.includes('max')) const isPercentage = format === '%' const isRamChart = chartData?.some((att: any) => att.name.toLowerCase().includes('ram_')) const isDiskSpaceChart = chartData?.some((att: any) => att.name.toLowerCase().includes('disk_space_') ) - const isDiskSizeChart = chartData?.some((att: any) => att.name.toLowerCase().includes('disk_fs_')) + const isDBSizeChart = chartData?.some((att: any) => + att.name.toLowerCase().includes('pg_database_size') + ) const isNetworkChart = chartData?.some((att: any) => att.name.toLowerCase().includes('network_')) - const shouldFormatBytes = isRamChart || isDiskSpaceChart || isDiskSizeChart || isNetworkChart - + const shouldFormatBytes = isRamChart || isDiskSpaceChart || isDBSizeChart || isNetworkChart //* // Set the y-axis domain // to the highest value in the chart data for percentage charts @@ -387,6 +418,7 @@ export default function ComposedChart({ y={line.value} strokeWidth={1} {...line} + color={line.color?.dark} strokeDasharray={line.strokeDasharray ?? 
'3 3'} label={undefined} > @@ -418,13 +450,11 @@ export default function ComposedChart({ className="text-foreground-lighter -mt-9 flex items-center justify-between text-xs" style={{ marginLeft: YAxisProps?.width }} > - - {xAxisIsDate ? day(data[0]?.timestamp).format(customDateFormat) : data[0]?.timestamp} - + {xAxisIsDate ? formatTimestamp(data[0]?.[xAxisKey]) : data[0]?.[xAxisKey]} {xAxisIsDate - ? day(data[data?.length - 1]?.timestamp).format(customDateFormat) - : data[data?.length - 1]?.timestamp} + ? formatTimestamp(data[data.length - 1]?.[xAxisKey]) + : data[data.length - 1]?.[xAxisKey]} )} diff --git a/apps/studio/components/ui/Charts/ComposedChart.utils.tsx b/apps/studio/components/ui/Charts/ComposedChart.utils.tsx index 95444d5223059..831f4b6153ad9 100644 --- a/apps/studio/components/ui/Charts/ComposedChart.utils.tsx +++ b/apps/studio/components/ui/Charts/ComposedChart.utils.tsx @@ -31,13 +31,19 @@ export interface ReportAttributes { hideHighlightedValue?: boolean } -type Provider = 'infra-monitoring' | 'daily-stats' | 'reference-line' | 'combine' +export type Provider = 'infra-monitoring' | 'daily-stats' | 'mock' | 'reference-line' | 'logs' export type MultiAttribute = { attribute: string provider: Provider label?: string - color?: string + color?: { + light: string + dark: string + } + statusCode?: string + grantType?: string + providerType?: string stackId?: string format?: string description?: string @@ -65,6 +71,7 @@ export type MultiAttribute = { strokeDasharray?: string className?: string hide?: boolean + enabled?: boolean } interface CustomIconProps { @@ -167,7 +174,7 @@ const CustomTooltip = ({ return (
{getIcon(entry.color, isMax)} - + {attribute?.label || entry.name} @@ -194,9 +201,12 @@ const CustomTooltip = ({ >

{dayjs(timestamp).format(DateTimeFormats.FULL_SECONDS)}

-          {payload.reverse().map((entry: any, index: number) => (
-          ))}
+          {payload
+            .reverse()
+            .filter((entry: any) => entry.value !== 0)
+            .map((entry: any, index: number) => (
+
+            ))}
           {active && showTotal && (
Total @@ -270,7 +280,7 @@ const CustomLabel = ({ payload, attributes, showMaxValue, onLabelHover }: Custom {getIcon(entry.name, entry.color)} diff --git a/apps/studio/components/ui/Charts/ComposedChartHandler.tsx b/apps/studio/components/ui/Charts/ComposedChartHandler.tsx index 1c90a17dd37a4..84f500221999d 100644 --- a/apps/studio/components/ui/Charts/ComposedChartHandler.tsx +++ b/apps/studio/components/ui/Charts/ComposedChartHandler.tsx @@ -1,5 +1,11 @@ -import React, { PropsWithChildren, useState, useMemo, useEffect, useRef } from 'react' -import { useRouter } from 'next/router' +/** + * ComposedChartHandler + * + * A presentational component for rendering charts. + * It is responsible only for rendering the chart UI based on the data and loading state passed to it as props. + * All the complex data fetching logic has been moved to the useChartData hook. + */ +import React, { PropsWithChildren, useState, useEffect, useRef } from 'react' import { Loader2 } from 'lucide-react' import { cn, WarningIcon } from 'ui' @@ -11,14 +17,14 @@ import { InfraMonitoringAttribute } from 'data/analytics/infra-monitoring-query' import { useInfraMonitoringQueries } from 'data/analytics/infra-monitoring-queries' import { ProjectDailyStatsAttribute } from 'data/analytics/project-daily-stats-query' import { useProjectDailyStatsQueries } from 'data/analytics/project-daily-stats-queries' -import { useDatabaseSelectorStateSnapshot } from 'state/database-selector' import { useChartHighlight } from './useChartHighlight' +import { getMockDataForAttribute } from 'data/reports/auth-charts' import type { ChartData } from './Charts.types' import type { UpdateDateRange } from 'pages/project/[ref]/reports/database' -import { MultiAttribute } from './ComposedChart.utils' +import type { MultiAttribute } from './ComposedChart.utils' -export interface ComposedChartHandlerProps { +interface ComposedChartHandlerProps { id?: string label: string attributes: MultiAttribute[] @@ -40,6 +46,7 @@ export interface ComposedChartHandlerProps { updateDateRange: UpdateDateRange valuePrecision?: number isVisible?: boolean + titleTooltip?: string docsUrl?: string } @@ -91,9 +98,6 @@ const LazyChartWrapper = ({ children }: PropsWithChildren) => { const ComposedChartHandler = ({ label, attributes, - startDate, - endDate, - interval, customDateFormat, children = null, defaultChartStyle = 'bar', @@ -109,131 +113,14 @@ const ComposedChartHandler = ({ showTotal, updateDateRange, valuePrecision, - isVisible = true, + titleTooltip, id, ...otherProps }: PropsWithChildren) => { - const router = useRouter() - const { ref } = router.query - - const state = useDatabaseSelectorStateSnapshot() const [chartStyle, setChartStyle] = useState(defaultChartStyle) const chartHighlight = useChartHighlight() - const databaseIdentifier = state.selectedDatabaseId - - // Use the custom hook at the top level of the component - const attributeQueries = useAttributeQueries( - attributes, - ref, - startDate, - endDate, - interval as AnalyticsInterval, - databaseIdentifier, - data, - isVisible - ) - - // Combine all the data into a single dataset - const combinedData = useMemo(() => { - if (data) return data - - const isLoading = attributeQueries.some((query: any) => query.isLoading) - if (isLoading) return undefined - - const hasError = attributeQueries.some((query: any) => !query.data) - if (hasError) return undefined - - // Get all unique timestamps from all datasets - const timestamps = new Set() - attributeQueries.forEach((query: any) => { - 
query.data?.data?.forEach((point: any) => { - if (point?.period_start) { - timestamps.add(point.period_start) - } - }) - }) - - const referenceLineQueries = attributeQueries.filter( - (_, index) => attributes[index].provider === 'reference-line' - ) - - // Combine data points for each timestamp - const combined = Array.from(timestamps) - .sort() - .map((timestamp) => { - const point: any = { timestamp } - - // Add regular attributes - attributes.forEach((attr, index) => { - if (!attr) return - - // Handle custom value attributes (like disk size) - if (attr.customValue !== undefined) { - point[attr.attribute] = attr.customValue - return - } - - // Skip reference line attributes here, we'll add them below - if (attr.provider === 'reference-line') return - - const queryData = attributeQueries[index]?.data?.data - const matchingPoint = queryData?.find((p: any) => p.period_start === timestamp) - let value = matchingPoint?.[attr.attribute] ?? 0 - - // Apply value manipulation if provided - if (attr.manipulateValue && typeof attr.manipulateValue === 'function') { - // Ensure value is a number before manipulation - const numericValue = typeof value === 'number' ? value : Number(value) || 0 - value = attr.manipulateValue(numericValue) - } - - point[attr.attribute] = value - }) - - // Add reference line values for each timestamp - referenceLineQueries.forEach((query: any) => { - const attr = query.data.attribute - const value = query.data.total - point[attr] = value - }) - - return point as DataPoint - }) - - return combined as DataPoint[] - }, [data, attributeQueries, attributes]) - - const loading = isLoading || attributeQueries.some((query: any) => query.isLoading) - - // Calculate highlighted value based on the first attribute's data - const _highlightedValue = useMemo(() => { - if (highlightedValue !== undefined) return highlightedValue - - const firstAttr = attributes[0] - const firstQuery = attributeQueries[0] - const firstData = firstQuery?.data - - if (!firstData) return undefined - - const shouldHighlightMaxValue = - firstAttr.provider === 'daily-stats' && - !firstAttr.attribute.includes('ingress') && - !firstAttr.attribute.includes('egress') && - 'maximum' in firstData - - const shouldHighlightTotalGroupedValue = 'totalGrouped' in firstData - - return shouldHighlightMaxValue - ? firstData.maximum - : firstAttr.provider === 'daily-stats' - ? firstData.total - : shouldHighlightTotalGroupedValue - ? firstData.totalGrouped?.[firstAttr.attribute as keyof typeof firstData.totalGrouped] - : (firstData.data[firstData.data.length - 1] as any)?.[firstAttr.attribute] - }, [highlightedValue, attributes, attributeQueries]) - - if (loading) { + if (isLoading) { return ( @@ -272,11 +159,11 @@ const ComposedChartHandler = ({
{children}
@@ -296,7 +184,7 @@ const ComposedChartHandler = ({ ) } -const useAttributeQueries = ( +export const useAttributeQueries = ( attributes: MultiAttribute[], ref: string | string[] | undefined, startDate: string, @@ -306,16 +194,15 @@ const useAttributeQueries = ( data: ChartData | undefined, isVisible: boolean ) => { - const infraAttributes = attributes - .filter((attr) => attr?.provider === 'infra-monitoring') - .map((attr) => attr.attribute as InfraMonitoringAttribute) - const dailyStatsAttributes = attributes - .filter((attr) => attr?.provider === 'daily-stats') - .map((attr) => attr.attribute as ProjectDailyStatsAttribute) - const referenceLines = attributes.filter((attr) => attr?.provider === 'reference-line') + const projectRef = typeof ref === 'string' ? ref : Array.isArray(ref) ? ref[0] : '' + + const infraAttributes = attributes.filter((attr) => attr.provider === 'infra-monitoring') + const dailyStatsAttributes = attributes.filter((attr) => attr.provider === 'daily-stats') + const mockAttributes = attributes.filter((attr) => attr.provider === 'mock') + const referenceLineAttributes = attributes.filter((attr) => attr.provider === 'reference-line') const infraQueries = useInfraMonitoringQueries( - infraAttributes, + infraAttributes.map((attr) => attr.attribute as InfraMonitoringAttribute), ref, startDate, endDate, @@ -325,7 +212,7 @@ const useAttributeQueries = ( isVisible ) const dailyStatsQueries = useProjectDailyStatsQueries( - dailyStatsAttributes, + dailyStatsAttributes.map((attr) => attr.attribute as ProjectDailyStatsAttribute), ref, startDate, endDate, @@ -335,23 +222,48 @@ const useAttributeQueries = ( isVisible ) - const referenceLineQueries = referenceLines.map((line) => { - let value = line.value || 0 - - return { - data: { - data: [], // Will be populated in combinedData - attribute: line.attribute, - total: value, - maximum: value, - totalGrouped: { [line.attribute]: value }, - }, - isLoading: false, - isError: false, - } - }) - - return [...infraQueries, ...dailyStatsQueries, ...referenceLineQueries] + let infraIdx = 0 + let dailyStatsIdx = 0 + return attributes + .filter((attr) => attr.provider !== 'logs') + .map((attr) => { + if (attr.provider === 'infra-monitoring') { + return { + ...infraQueries[infraIdx++], + data: { ...infraQueries[infraIdx - 1]?.data, provider: 'infra-monitoring' }, + } + } else if (attr.provider === 'daily-stats') { + return { + ...dailyStatsQueries[dailyStatsIdx++], + data: { ...dailyStatsQueries[dailyStatsIdx - 1]?.data, provider: 'daily-stats' }, + } + } else if (attr.provider === 'mock') { + const mockData = getMockDataForAttribute(attr.attribute) + return { + isLoading: false, + data: { ...mockData, provider: 'mock', attribute: attr.attribute }, + } + } else if (attr.provider === 'reference-line') { + let value = attr.value || 0 + return { + data: { + data: [], + attribute: attr.attribute, + total: value, + maximum: value, + totalGrouped: { [attr.attribute]: value }, + provider: 'reference-line', + }, + isLoading: false, + isError: false, + } + } else { + return { + isLoading: false, + data: undefined, + } + } + }) } export default function LazyComposedChartHandler(props: ComposedChartHandlerProps) { diff --git a/apps/studio/components/ui/CopyButton.tsx b/apps/studio/components/ui/CopyButton.tsx index 9ac56ba9df6a7..a45df0d6180fa 100644 --- a/apps/studio/components/ui/CopyButton.tsx +++ b/apps/studio/components/ui/CopyButton.tsx @@ -7,7 +7,6 @@ type CopyButtonBaseProps = { iconOnly?: boolean copyLabel?: string copiedLabel?: string - onCopy?: 
() => Promise | string } type CopyButtonWithText = CopyButtonBaseProps & { diff --git a/apps/studio/data/integrations/github-branches-query.ts b/apps/studio/data/integrations/github-branches-query.ts index 18a49ece0a0e8..342238466c54e 100644 --- a/apps/studio/data/integrations/github-branches-query.ts +++ b/apps/studio/data/integrations/github-branches-query.ts @@ -19,7 +19,7 @@ export async function getGitHubBranches( }) if (error) handleError(error) - return data as Record[] + return data } export type GitHubBranchesData = Awaited> diff --git a/apps/studio/data/integrations/integrations.types.ts b/apps/studio/data/integrations/integrations.types.ts index cc5c7f3ccce85..a453f58177127 100644 --- a/apps/studio/data/integrations/integrations.types.ts +++ b/apps/studio/data/integrations/integrations.types.ts @@ -259,7 +259,7 @@ export type IntegrationConnectionsCreateVariables = { export type GitHubConnectionCreateVariables = { organizationId: number - connection: components['schemas']['CreateGitHubConnectionsBody'] + connection: components['schemas']['CreateGitHubConnectionBody'] } export type EnvironmentTargets = 'production' | 'preview' | 'development' diff --git a/apps/studio/data/reports/auth-charts.ts b/apps/studio/data/reports/auth-charts.ts new file mode 100644 index 0000000000000..7978511209cce --- /dev/null +++ b/apps/studio/data/reports/auth-charts.ts @@ -0,0 +1,1197 @@ +export const getAuthReportAttributes = (isFreePlan: boolean) => [ + { + id: 'active-users', + label: 'Active Users', + valuePrecision: 0, + hide: false, + showTooltip: false, + showLegend: false, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'bar', + attributes: [ + { attribute: 'ActiveUsers', provider: 'logs', label: 'Active Users', enabled: true }, + ], + }, + { + id: 'sign-in-attempts', + label: 'Sign In Attempts by Type', + valuePrecision: 0, + hide: false, + showTooltip: true, + showLegend: true, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'bar', + titleTooltip: 'The total number of sign in attempts by grant type.', + attributes: [ + { + attribute: 'SignInAttempts', + provider: 'logs', + label: 'Password', + grantType: 'password', + enabled: true, + }, + { + attribute: 'SignInAttempts', + provider: 'logs', + label: 'PKCE', + grantType: 'pkce', + enabled: true, + }, + { + attribute: 'SignInAttempts', + provider: 'logs', + label: 'Refresh Token', + grantType: 'refresh_token', + enabled: true, + }, + { + attribute: 'SignInAttempts', + provider: 'logs', + label: 'ID Token', + grantType: 'id_token', + enabled: true, + }, + ], + }, + { + id: 'signups', + label: 'Sign Ups', + valuePrecision: 0, + hide: false, + showTooltip: true, + showLegend: false, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'bar', + titleTooltip: 'The total number of sign ups.', + attributes: [ + { + attribute: 'TotalSignUps', + provider: 'logs', + label: 'Sign Ups', + enabled: true, + }, + ], + }, + { + id: 'auth-errors', + label: 'Auth Errors', + valuePrecision: 0, + hide: false, + showTooltip: true, + showLegend: true, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'bar', + titleTooltip: 'The total number of auth errors by status code.', + attributes: [ + { + attribute: 'ErrorsByStatus', + provider: 'logs', + label: 'Auth Errors', + }, + ], + }, + { + id: 'password-reset-requests', + label: 'Password Reset Requests', + valuePrecision: 0, + hide: false, + showTooltip: false, + showLegend: false, + showMaxValue: false, + hideChartType: false, + 
defaultChartStyle: 'bar', + attributes: [ + { + attribute: 'PasswordResetRequests', + provider: 'logs', + label: 'Password Reset Requests', + enabled: true, + }, + ], + }, + { + id: 'sign-in-latency', + label: 'Sign In Latency', + valuePrecision: 2, + hide: true, // Jordi: Hidden until we can fix the query + showTooltip: true, + showLegend: true, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'line', + titleTooltip: 'Average latency for sign in operations by grant type.', + attributes: [ + { + attribute: 'SignInLatency', + provider: 'logs', + label: 'Password', + grantType: 'password', + enabled: true, + }, + { + attribute: 'SignInLatency', + provider: 'logs', + label: 'PKCE', + grantType: 'pkce', + enabled: true, + }, + { + attribute: 'SignInLatency', + provider: 'logs', + label: 'Refresh Token', + grantType: 'refresh_token', + enabled: true, + }, + { + attribute: 'SignInLatency', + provider: 'logs', + label: 'ID Token', + grantType: 'id_token', + enabled: true, + }, + ], + }, + { + id: 'sign-up-latency', + label: 'Sign Up Latency', + valuePrecision: 2, + hide: true, // Jordi: Hidden until we can fix the query + showTooltip: true, + showLegend: true, + showMaxValue: false, + hideChartType: false, + defaultChartStyle: 'line', + titleTooltip: 'Average latency for sign up operations by provider.', + attributes: [ + { + attribute: 'SignUpLatency', + provider: 'logs', + label: 'Email', + providerType: 'email', + enabled: true, + }, + { + attribute: 'SignUpLatency', + provider: 'logs', + label: 'Google', + providerType: 'google', + enabled: true, + }, + { + attribute: 'SignUpLatency', + provider: 'logs', + label: 'GitHub', + providerType: 'github', + enabled: true, + }, + { + attribute: 'SignUpLatency', + provider: 'logs', + label: 'Apple', + providerType: 'apple', + enabled: true, + }, + ], + }, +] + +/** + * ================================================ + * Mock data below + * TODO: Remove once we have real data + * ================================================ + * */ + +// Time points for all mock data +const DEFAULT_TIME_POINTS = [ + '2025-05-05T13:47:23Z', + '2025-05-05T14:47:23Z', + '2025-05-05T15:47:23Z', + '2025-05-05T16:47:23Z', + '2025-05-05T17:47:23Z', + '2025-05-05T18:47:23Z', + '2025-05-05T19:47:23Z', + '2025-05-05T20:47:23Z', + '2025-05-05T21:47:23Z', + '2025-05-05T22:47:23Z', + '2025-05-05T23:47:23Z', + '2025-05-06T00:47:23Z', + '2025-05-06T01:47:23Z', + '2025-05-06T02:47:23Z', + '2025-05-06T03:47:23Z', + '2025-05-06T04:47:23Z', + '2025-05-06T05:47:23Z', + '2025-05-06T06:47:23Z', + '2025-05-06T07:47:23Z', + '2025-05-06T08:47:23Z', + '2025-05-06T09:47:23Z', + '2025-05-06T10:47:23Z', + '2025-05-06T11:47:23Z', + '2025-05-06T12:47:23Z', + '2025-05-06T13:47:23Z', +] + +// Helper to generate random values with some "realistic" patterns +const generatePatternedValues = ( + length: number, + base: number = 5, + variance: number = 5, + spikeProbability: number = 0.1, + spikeMultiplier: number = 5 +): number[] => { + return Array(length) + .fill(0) + .map(() => { + const hasSpike = Math.random() < spikeProbability + const baseValue = base + Math.floor(Math.random() * variance) + return hasSpike ? 
baseValue * spikeMultiplier : baseValue + }) +} + +export const getAuthRequestsMockData = (provider: string) => { + let values: number[] = [] + + if (provider === 'Email') { + values = [3, 4, 4, 5, 4, 3, 5, 6, 4, 3, 5, 5, 4, 3, 4, 5, 6, 7, 5, 15, 6, 9, 17, 8, 4] + } else if (provider === 'Google') { + values = [2, 1, 3, 0, 5, 2, 0, 0, 1, 2, 0, 0, 1, 2, 1, 0, 0, 0, 1, 1, 2, 4, 0, 4, 12] + } else if (provider === 'GitHub') { + values = [0, 0, 8, 0, 3, 0, 0, 1, 0, 0, 3, 0, 0, 0, 1, 0, 8, 4, 2, 0, 2, 1, 3, 6, 10] + } else { + values = Array(25).fill(0) + } + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [provider]: values[index], + })) + + return { + data, + yAxisLimit: 100, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +// Add new mock data generator for provider-specific sessions +export const getProviderSessionsMockData = (provider: string) => { + let values: number[] = [] + + // Generate higher base values for sessions compared to signups + if (provider === 'EmailSessions') { + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 15, 8, 0.1, 2) + } else if (provider === 'GoogleSessions') { + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 12, 6, 0.1, 2.5) + } else if (provider === 'GitHubSessions') { + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 10, 5, 0.1, 2.2) + } else { + // For other providers, generate lower values + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 5, 3, 0.1, 2) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [provider]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +// Add new mock data generator for provider-specific churn rates +export const getProviderChurnMockData = (provider: string) => { + let values: number[] = [] + + // Generate realistic churn rates (typically between 2-15%) + if (provider === 'EmailChurn') { + // Email typically has higher churn + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 2, 10, 0.1, 5.2).map((v) => + Math.min(v, 20) + ) + } else if (provider === 'GoogleChurn') { + // Google typically has lower churn due to account persistence + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 8, 2, 0.1, 1.3).map((v) => + Math.min(v, 15) + ) + } else if (provider === 'GitHubChurn') { + // GitHub has moderate churn + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 10, 2.5, 0.1, 1.2).map((v) => + Math.min(v, 18) + ) + } else { + // Other providers have varying churn rates + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 9, 3, 0.1, 1.4).map((v) => + Math.min(v, 20) + ) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [provider]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 20, // Max 20% churn rate + format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +// Add new mock data generator for churn rates +export const getChurnRateMockData = (attribute: string) => { + let values: number[] = [] + + // Overall churn rate with some 
variance + values = generatePatternedValues(DEFAULT_TIME_POINTS.length, 0.6, 2, 0.15, 2.6) + .map((v) => Math.max(0, v - 0.25)) // Allow some values to go to 0 + .map((v) => Math.min(v, 3.2)) + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [attribute]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 1.5, // Max 1.5% churn rate + format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +// Mock data generators for new charts +export const getActiveUsersMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + // Generate more realistic pattern for Daily Active Users + // Create 30 days of data for a better view + const thirtyDays = 30 + const thirtyDayDates = Array(thirtyDays) + .fill(0) + .map((_, i) => { + const date = new Date() + date.setDate(date.getDate() - (thirtyDays - i - 1)) + return date.toISOString() + }) + + // Base values that reflect a gradual growth trend + const baseValue = 200 + const dailyGrowth = 1.5 // Small daily growth factor + + if (metric === 'DAU') { + values = Array(thirtyDays) + .fill(0) + .map((_, i) => { + const dayOfWeek = new Date(thirtyDayDates[i]).getDay() + const isWeekend = dayOfWeek === 0 || dayOfWeek === 6 + + // Base value with growth trend + let value = baseValue + i * dailyGrowth + + // Weekday/weekend pattern (weekends have ~20% less activity) + if (isWeekend) { + value = value * 0.8 + } + + // Add some randomness + const randomFactor = 0.9 + Math.random() * 0.2 // ±10% + value = value * randomFactor + + return Math.round(value) + }) + } else { + // For WAU and MAU (not used in this case) + values = Array(thirtyDays).fill(0) + } + + const data = thirtyDayDates.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getUserTypesMockData = (type: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (type === 'NewUsers') { + values = generatePatternedValues(length, 40, 20, 0.15, 3) + } else if (type === 'ReturningUsers') { + values = generatePatternedValues(length, 200, 50, 0.1, 1.5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [type]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getAuthSessionsMockData = (sessionType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (sessionType === 'TotalSessions') { + values = generatePatternedValues(length, 1200, 300, 0.1, 1.3) + } else if (sessionType === 'WebSessions') { + values = generatePatternedValues(length, 800, 200, 0.1, 1.4) + } else if (sessionType === 'MobileSessions') { + values = generatePatternedValues(length, 400, 150, 0.12, 1.5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [sessionType]: 
values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getSessionDurationMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + // Session duration in minutes following a relatively stable pattern + const values = generatePatternedValues(length, 15, 5, 0.05, 1.5) + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +// Mock data generators for authentication flows +export const getPasswordResetMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (metric === 'PasswordResetRequests') { + values = generatePatternedValues(length, 25, 15, 0.1, 2) + } else if (metric === 'PasswordResetCompleted') { + values = generatePatternedValues(length, 18, 10, 0.1, 1.5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getEmailVerificationMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (metric === 'VerificationSent') { + values = generatePatternedValues(length, 50, 20, 0.12, 1.8) + } else if (metric === 'VerificationCompleted') { + values = generatePatternedValues(length, 35, 15, 0.1, 1.5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getVerificationRatesMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (metric === 'EmailOpenRate') { + // Email open rates between 50-80% + values = generatePatternedValues(length, 65, 15, 0.05, 1.2).map((v) => Math.min(v, 100)) + } else if (metric === 'VerificationRate') { + // Verification completion rates between 60-90% + values = generatePatternedValues(length, 75, 15, 0.05, 1.1).map((v) => Math.min(v, 100)) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 100, // Percentage chart + format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +export const getMfaUsageMockData = (method: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (method === 'MfaSMS') { + values 
= generatePatternedValues(length, 15, 8, 0.1, 1.5) + } else if (method === 'MfaAuthenticator') { + values = generatePatternedValues(length, 25, 10, 0.1, 1.4) + } else if (method === 'MfaEmail') { + values = generatePatternedValues(length, 10, 5, 0.1, 1.6) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [method]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getMfaAdoptionMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + // MFA adoption between 15-35% + const values = generatePatternedValues(length, 25, 10, 0.05, 1.2).map((v) => Math.min(v, 100)) + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 100, // Percentage chart + format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +// Mock data generators for error and security charts +export const getAuthErrorsMockData = (errorType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (errorType === 'Status403') { + // 403 Forbidden - Relatively uncommon + values = generatePatternedValues(length, 1, 1, 0.05, 3) + } else if (errorType === 'Status422') { + // 422 Unprocessable Entity - More common validation errors + values = generatePatternedValues(length, 8, 5, 0.12, 2) + } else if (errorType === 'Status429') { + // 429 Too Many Requests - Rate limiting, can be spiky + values = generatePatternedValues(length, 3, 2, 0.15, 4) + } else if (errorType === 'Status500') { + // 500 Internal Server Error - Server errors, usually low but impactful + values = generatePatternedValues(length, 1, 1, 0.08, 5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [errorType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getProviderErrorRateMockData = (provider: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + // Error rates between 0.5-8% + if (provider === 'EmailErrorRate') { + values = generatePatternedValues(length, 2.5, 2, 0.15, 2).map((v) => Math.min(v, 10)) + } else if (provider === 'GoogleErrorRate') { + values = generatePatternedValues(length, 1.2, 1, 0.1, 2.5).map((v) => Math.min(v, 10)) + } else if (provider === 'GitHubErrorRate') { + values = generatePatternedValues(length, 1.5, 1.2, 0.12, 2.2).map((v) => Math.min(v, 10)) + } else if (provider === 'FacebookErrorRate') { + values = generatePatternedValues(length, 2, 1.5, 0.1, 2).map((v) => Math.min(v, 10)) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [provider]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 10, // Percentage chart 
+ format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +export const getRateLimitingMockData = (eventType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (eventType === 'IPRateLimited') { + values = generatePatternedValues(length, 4, 3, 0.1, 4) + } else if (eventType === 'UserRateLimited') { + values = generatePatternedValues(length, 2, 2, 0.08, 3) + } else if (eventType === 'BruteForceAttempts') { + values = generatePatternedValues(length, 1, 1, 0.05, 5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [eventType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getAuthLatencyMockData = (operationType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (operationType === 'SignInLatency') { + values = generatePatternedValues(length, 180, 50, 0.1, 1.5) + } else if (operationType === 'SignUpLatency') { + values = generatePatternedValues(length, 250, 70, 0.1, 1.4) + } else if (operationType === 'TokenRefreshLatency') { + values = generatePatternedValues(length, 120, 40, 0.1, 1.6) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [operationType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(0)), + total, + } +} + +export const getSecurityEventsMockData = (eventType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (eventType === 'SuspiciousLogins') { + values = generatePatternedValues(length, 3, 2, 0.1, 3) + } else if (eventType === 'NewDeviceLogins') { + values = generatePatternedValues(length, 15, 8, 0.1, 1.5) + } else if (eventType === 'PasswordBreachDetections') { + values = generatePatternedValues(length, 1, 1, 0.05, 5) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [eventType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getTokenUsageMockData = (metricType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (metricType === 'TokenIssuance') { + values = generatePatternedValues(length, 100, 40, 0.1, 1.5) + } else if (metricType === 'TokenRefresh') { + values = generatePatternedValues(length, 400, 100, 0.1, 1.3) + } else if (metricType === 'TokenRevocation') { + values = generatePatternedValues(length, 30, 15, 0.1, 2) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metricType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + 
totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +// Mock data generators for conversion metrics +export const getAuthFunnelMockData = (stage: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (stage === 'PageVisits') { + values = generatePatternedValues(length, 500, 200, 0.1, 1.5) + } else if (stage === 'SignupStarts') { + values = generatePatternedValues(length, 120, 50, 0.1, 1.4) + } else if (stage === 'SignupCompletes') { + values = generatePatternedValues(length, 80, 30, 0.1, 1.3) + } else if (stage === 'FirstLogins') { + values = generatePatternedValues(length, 75, 25, 0.1, 1.3) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [stage]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getConversionRatesMockData = (rateType: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (rateType === 'VisitToSignupRate') { + // Visit to signup: 20-30% + values = generatePatternedValues(length, 25, 5, 0.1, 1.2).map((v) => Math.min(v, 40)) + } else if (rateType === 'SignupCompletionRate') { + // Signup completion: 60-80% + values = generatePatternedValues(length, 70, 10, 0.1, 1.1).map((v) => Math.min(v, 90)) + } else if (rateType === 'RetentionRate') { + // 7-day retention: 40-60% + values = generatePatternedValues(length, 50, 10, 0.1, 1.1).map((v) => Math.min(v, 75)) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [rateType]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: 100, // Percentage chart + format: '%', + totalAverage: parseFloat(totalAverage.toFixed(1)), + total, + } +} + +export const getOnboardingCompletionMockData = (stage: string) => { + const length = DEFAULT_TIME_POINTS.length + let values: number[] = [] + + if (stage === 'OnboardingStarts') { + values = generatePatternedValues(length, 70, 25, 0.1, 1.4) + } else if (stage === 'ProfileCompletions') { + values = generatePatternedValues(length, 50, 20, 0.1, 1.3) + } else if (stage === 'VerificationCompletions') { + values = generatePatternedValues(length, 40, 15, 0.1, 1.3) + } else { + values = Array(length).fill(0) + } + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [stage]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: parseFloat(totalAverage.toFixed(2)), + total, + } +} + +export const getOnboardingTimeMockData = (metric: string) => { + const length = DEFAULT_TIME_POINTS.length + // Onboarding time in minutes, generally 5-15 minutes + const values = generatePatternedValues(length, 10, 5, 0.1, 1.3) + + const data = DEFAULT_TIME_POINTS.map((time, index) => ({ + period_start: time, + [metric]: values[index], + })) + + const total = values.reduce((sum, val) => sum + val, 0) + const totalAverage = total / values.length + + return { + data, + yAxisLimit: Math.max(...values) * 1.2, + format: '', + totalAverage: 
parseFloat(totalAverage.toFixed(1)), + total, + } +} + +// Master mock data resolver - routes to the appropriate data generator based on attribute +export const getMockDataForAttribute = (attribute: string) => { + // Auth Requests (original chart) + if ( + [ + 'Email', + 'Google', + 'GitHub', + 'Phone', + 'SAML', + 'Apple', + 'Azure', + 'Bitbucket', + 'Discord', + 'Facebook', + 'Figma', + 'GitLab', + 'Kakao', + 'KeyCloak', + 'LinkedIn', + 'Notion', + 'Twitch', + 'Twitter', + 'Slack', + 'Spotify', + 'WorkOS', + 'Zoom', + ].includes(attribute) + ) { + return getAuthRequestsMockData(attribute) + } + + // Provider-specific sessions + if ( + [ + 'EmailSessions', + 'PhoneSessions', + 'SAMLSessions', + 'AppleSessions', + 'AzureSessions', + 'BitbucketSessions', + 'DiscordSessions', + 'FacebookSessions', + 'FigmaSessions', + 'GitHubSessions', + 'GitLabSessions', + 'GoogleSessions', + 'KakaoSessions', + 'KeyCloakSessions', + 'LinkedInSessions', + 'NotionSessions', + 'TwitchSessions', + 'TwitterSessions', + 'SlackSessions', + 'SpotifySessions', + 'WorkOSSessions', + 'ZoomSessions', + ].includes(attribute) + ) { + return getProviderSessionsMockData(attribute) + } + + // Provider-specific churn + if ( + [ + 'EmailChurn', + 'PhoneChurn', + 'SAMLChurn', + 'AppleChurn', + 'AzureChurn', + 'BitbucketChurn', + 'DiscordChurn', + 'FacebookChurn', + 'FigmaChurn', + 'GitHubChurn', + 'GitLabChurn', + 'GoogleChurn', + 'KakaoChurn', + 'KeyCloakChurn', + 'LinkedInChurn', + 'NotionChurn', + 'TwitchChurn', + 'TwitterChurn', + 'SlackChurn', + 'SpotifyChurn', + 'WorkOSChurn', + 'ZoomChurn', + ].includes(attribute) + ) { + return getProviderChurnMockData(attribute) + } + + // Overall churn rate + if (attribute === 'ChurnRate') { + return getChurnRateMockData(attribute) + } + + // Active Users charts + if (['DAU', 'WAU', 'MAU'].includes(attribute)) { + return getActiveUsersMockData(attribute) + } + + // User Types charts + if (['NewUsers', 'ReturningUsers'].includes(attribute)) { + return getUserTypesMockData(attribute) + } + + // Auth Sessions charts + if (['TotalSessions', 'WebSessions', 'MobileSessions'].includes(attribute)) { + return getAuthSessionsMockData(attribute) + } + + // Session Duration + if (attribute === 'AvgSessionDuration') { + return getSessionDurationMockData(attribute) + } + + // Password Reset metrics + if (['PasswordResetRequests', 'PasswordResetCompleted'].includes(attribute)) { + return getPasswordResetMockData(attribute) + } + + // Email Verification metrics + if (['VerificationSent', 'VerificationCompleted'].includes(attribute)) { + return getEmailVerificationMockData(attribute) + } + + // Verification Rates + if (['EmailOpenRate', 'VerificationRate'].includes(attribute)) { + return getVerificationRatesMockData(attribute) + } + + // MFA Usage + if (['MfaSMS', 'MfaAuthenticator', 'MfaEmail'].includes(attribute)) { + return getMfaUsageMockData(attribute) + } + + // MFA Adoption Rate + if (attribute === 'MfaAdoptionRate') { + return getMfaAdoptionMockData(attribute) + } + + // Auth Errors + if (['Status403', 'Status422', 'Status429', 'Status500'].includes(attribute)) { + return getAuthErrorsMockData(attribute) + } + + // Provider Error Rates + if ( + ['EmailErrorRate', 'GoogleErrorRate', 'GitHubErrorRate', 'FacebookErrorRate'].includes( + attribute + ) + ) { + return getProviderErrorRateMockData(attribute) + } + + // Rate Limiting + if (['IPRateLimited', 'UserRateLimited', 'BruteForceAttempts'].includes(attribute)) { + return getRateLimitingMockData(attribute) + } + + // Auth Latency + if 
(['SignInLatency', 'SignUpLatency', 'TokenRefreshLatency'].includes(attribute)) { + return getAuthLatencyMockData(attribute) + } + + // Security Events + if (['SuspiciousLogins', 'NewDeviceLogins', 'PasswordBreachDetections'].includes(attribute)) { + return getSecurityEventsMockData(attribute) + } + + // Token Usage + if (['TokenIssuance', 'TokenRefresh', 'TokenRevocation'].includes(attribute)) { + return getTokenUsageMockData(attribute) + } + + // Auth Funnel + if (['PageVisits', 'SignupStarts', 'SignupCompletes', 'FirstLogins'].includes(attribute)) { + return getAuthFunnelMockData(attribute) + } + + // Conversion Rates + if (['VisitToSignupRate', 'SignupCompletionRate', 'RetentionRate'].includes(attribute)) { + return getConversionRatesMockData(attribute) + } + + // Onboarding Completion + if (['OnboardingStarts', 'ProfileCompletions', 'VerificationCompletions'].includes(attribute)) { + return getOnboardingCompletionMockData(attribute) + } + + // Onboarding Time + if (attribute === 'TimeToCompleteOnboarding') { + return getOnboardingTimeMockData(attribute) + } + + // Default fallback + return { + data: DEFAULT_TIME_POINTS.map((time) => ({ period_start: time, [attribute]: 0 })), + yAxisLimit: 10, + format: '', + totalAverage: 0, + total: 0, + } +} diff --git a/apps/studio/data/reports/auth-report-query.ts b/apps/studio/data/reports/auth-report-query.ts new file mode 100644 index 0000000000000..99f6ec0c35247 --- /dev/null +++ b/apps/studio/data/reports/auth-report-query.ts @@ -0,0 +1,328 @@ +import { useQuery } from '@tanstack/react-query' +import { get } from 'data/fetchers' +import { AnalyticsInterval } from 'data/analytics/constants' +import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils' +import { getHttpStatusCodeInfo } from 'lib/http-status-codes' +import { analyticsIntervalToGranularity } from './report.utils' + +/** + * METRICS + * Each chart in the UI has a corresponding metric key. + */ + +const METRIC_KEYS = [ + 'ActiveUsers', + 'SignInAttempts', + 'PasswordResetRequests', + 'TotalSignUps', + 'SignInLatency', + 'SignUpLatency', + 'ErrorsByStatus', +] + +const STATUS_CODE_COLORS: { [key: string]: { light: string; dark: string } } = { + '400': { light: '#FFD54F', dark: '#FFF176' }, + '401': { light: '#FF8A65', dark: '#FFAB91' }, + '403': { light: '#FFB74D', dark: '#FFCC80' }, + '404': { light: '#90A4AE', dark: '#B0BEC5' }, + '409': { light: '#BA68C8', dark: '#CE93D8' }, + '410': { light: '#A1887F', dark: '#BCAAA4' }, + '422': { light: '#FF9800', dark: '#FFB74D' }, + '429': { light: '#E65100', dark: '#F57C00' }, + '500': { light: '#B71C1C', dark: '#D32F2F' }, + '502': { light: '#9575CD', dark: '#B39DDB' }, + '503': { light: '#0097A7', dark: '#4DD0E1' }, + '504': { light: '#C0CA33', dark: '#D4E157' }, + default: { light: '#757575', dark: '#9E9E9E' }, +} + +type MetricKey = (typeof METRIC_KEYS)[number] + +/** + * SQL + * Each metric has a corresponding SQL query. 
+ */ + +const METRIC_SQL: Record<MetricKey, (interval: AnalyticsInterval) => string> = { + ActiveUsers: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --active-users + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + count(distinct json_value(f.event_message, "$.auth_event.actor_id")) as count + from auth_logs f + where json_value(f.event_message, "$.auth_event.action") in ( + 'login', 'user_signedup', 'token_refreshed', 'user_modified', + 'user_recovery_requested', 'user_reauthenticate_requested' + ) + group by timestamp + order by timestamp desc + ` + }, + SignInAttempts: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --sign-in-attempts + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + json_value(event_message, "$.grant_type") as grant_type, + count(*) as count + from auth_logs + where json_value(event_message, "$.path") = '/token' + group by timestamp, grant_type + order by timestamp desc, grant_type + ` + }, + PasswordResetRequests: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --password-reset-requests + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + count(*) as count + from auth_logs f + where json_value(f.event_message, "$.auth_event.action") = 'user_recovery_requested' + group by timestamp + order by timestamp desc + ` + }, + TotalSignUps: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --total-signups + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + count(*) as count + from auth_logs + where json_value(event_message, "$.auth_event.action") = 'user_signedup' + group by timestamp + order by timestamp desc + ` + }, + SignInLatency: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --signin-latency + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + json_value(event_message, "$.grant_type") as grant_type, + count(*) as request_count, + round(avg(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as avg_latency_ms, + round(min(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as min_latency_ms, + round(max(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as max_latency_ms, + round(approx_quantiles(cast(json_value(event_message, "$.duration") as int64), 100)[offset(50)] / 1000000, 2) as p50_latency_ms, + round(approx_quantiles(cast(json_value(event_message, "$.duration") as int64), 100)[offset(95)] / 1000000, 2) as p95_latency_ms, + round(approx_quantiles(cast(json_value(event_message, "$.duration") as int64), 100)[offset(99)] / 1000000, 2) as p99_latency_ms + from auth_logs + where json_value(event_message, "$.path") = '/token' + group by timestamp, grant_type + order by timestamp desc, grant_type + ` + }, + SignUpLatency: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --signup-latency + select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + json_value(event_message, "$.auth_event.traits.provider") as provider, + round(avg(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as avg_latency_ms, + round(min(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as min_latency_ms, + round(max(cast(json_value(event_message, "$.duration") as int64)) / 1000000, 2) as max_latency_ms, + round(approx_quantiles(cast(json_value(event_message, "$.duration") as int64),
100)[offset(50)] / 1000000, 2) as p50_latency_ms, + round(approx_quantiles(cast(json_value(event_message, "$.duration") as int64), 100)[offset(95)] / 1000000, 2) as p95_latency_ms + from auth_logs + where json_value(event_message, "$.auth_event.action") = 'user_signedup' + and json_value(event_message, "$.status") = '200' + group by timestamp, provider + order by timestamp desc, provider + ` + }, + ErrorsByStatus: (interval) => { + const granularity = analyticsIntervalToGranularity(interval) + return ` + --auth-errors-by-status +select + timestamp_trunc(timestamp, ${granularity}) as timestamp, + count(*) as count, + response.status_code +from edge_logs + cross join unnest(metadata) as m + cross join unnest(m.request) as request + cross join unnest(m.response) as response +where path like '%/auth%' + and response.status_code >= 400 and response.status_code <= 599 +group by timestamp, status_code +order by timestamp desc + ` + }, +} + +/** + * FORMATTERS. + * Metrics need to be formatted before being passed on to the UI charts. + */ + +function defaultFormatter(rawData: any, attributes: MultiAttribute[]) { + const chartAttributes = attributes + if (!rawData) return { data: undefined, chartAttributes } + const result = rawData.result || [] + const timestamps = new Set(result.map((p: any) => p.timestamp)) + const data = Array.from(timestamps) + .sort() + .map((timestamp) => { + const point: any = { period_start: timestamp } + chartAttributes.forEach((attr) => { + point[attr.attribute] = 0 + }) + const matchingPoints = result.filter((p: any) => p.timestamp === timestamp) + matchingPoints.forEach((p: any) => { + point[attributes[0].attribute] = p.count + }) + return point + }) + return { data, chartAttributes } +} + +const METRIC_FORMATTER: Record< + MetricKey, + ( + rawData: any, + attributes: MultiAttribute[], + logsMetric: string + ) => { data: any; chartAttributes: any } +> = { + ActiveUsers: (rawData, attributes) => defaultFormatter(rawData, attributes), + SignInAttempts: (rawData, attributes) => { + const chartAttributes = attributes.map((attr) => { + if (attr.attribute === 'SignInAttempts' && attr.grantType) { + return { ...attr, attribute: `${attr.attribute}_${attr.grantType}` } + } + return attr + }) + if (!rawData) return { data: undefined, chartAttributes } + const result = rawData.result || [] + const timestamps = new Set(result.map((p: any) => p.timestamp)) + const data = Array.from(timestamps) + .sort() + .map((timestamp) => { + const point: any = { period_start: timestamp } + chartAttributes.forEach((attr) => { + point[attr.attribute] = 0 + }) + const matchingPoints = result.filter((p: any) => p.timestamp === timestamp) + matchingPoints.forEach((p: any) => { + point[`SignInAttempts_${p.grant_type}`] = p.count + }) + return point + }) + return { data, chartAttributes } + }, + PasswordResetRequests: (rawData, attributes) => defaultFormatter(rawData, attributes), + TotalSignUps: (rawData, attributes) => defaultFormatter(rawData, attributes), + SignInLatency: (rawData, attributes) => defaultFormatter(rawData, attributes), + SignUpLatency: (rawData, attributes) => defaultFormatter(rawData, attributes), + ErrorsByStatus: (rawData, attributes) => { + if (!rawData) return { data: undefined, chartAttributes: attributes } + const result = rawData.result || [] + + const statusCodes = Array.from(new Set(result.map((p: any) => p.status_code))) + + const chartAttributes = statusCodes.map((statusCode) => { + const statusCodeInfo = getHttpStatusCodeInfo(Number(statusCode)) + const color = 
STATUS_CODE_COLORS[String(statusCode)] || STATUS_CODE_COLORS.default + + return { + attribute: `status_${statusCode}`, + label: `${statusCode} ${statusCodeInfo.label}`, + provider: 'logs', + enabled: true, + color: color, + statusCode: String(statusCode), + } + }) + + const timestamps = new Set(result.map((p: any) => p.timestamp)) + const data = Array.from(timestamps) + .sort() + .map((timestamp) => { + const point: any = { period_start: timestamp } + chartAttributes.forEach((attr) => { + point[attr.attribute] = 0 + }) + const matchingPoints = result.filter((p: any) => p.timestamp === timestamp) + matchingPoints.forEach((p: any) => { + point[`status_${p.status_code}`] = p.count + }) + return point + }) + + return { data, chartAttributes } + }, +} + +/** + * REPORT QUERY. + * Fetching and state management for the report. + */ + +export function useAuthLogsReport({ + projectRef, + attributes, + startDate, + endDate, + interval, + enabled = true, +}: { + projectRef: string + attributes: MultiAttribute[] + startDate: string + endDate: string + interval: AnalyticsInterval + enabled?: boolean +}) { + const logsMetric = attributes.length > 0 ? attributes[0].attribute : '' + + const isAuthMetric = METRIC_KEYS.includes(logsMetric) + + const sql = isAuthMetric ? METRIC_SQL[logsMetric](interval) : '' + + const { + data: rawData, + error, + isLoading, + } = useQuery( + ['auth-logs-report', projectRef, logsMetric, startDate, endDate, interval, sql], + async () => { + const { data, error } = await get(`/platform/projects/{ref}/analytics/endpoints/logs.all`, { + params: { + path: { ref: projectRef }, + query: { + sql, + iso_timestamp_start: startDate, + iso_timestamp_end: endDate, + }, + }, + }) + if (error) throw error + return data + }, + { + enabled: Boolean(projectRef && sql && enabled && isAuthMetric), + refetchOnWindowFocus: false, + } + ) + + // Use formatter if available + const formatter = + (isAuthMetric ? METRIC_FORMATTER[logsMetric as MetricKey] : undefined) || defaultFormatter + const { data, chartAttributes } = formatter(rawData, attributes, logsMetric) + + return { + data, + attributes: chartAttributes, + isLoading, + error, + } +} diff --git a/apps/studio/data/reports/report.utils.ts b/apps/studio/data/reports/report.utils.ts new file mode 100644 index 0000000000000..e41bf7e759d87 --- /dev/null +++ b/apps/studio/data/reports/report.utils.ts @@ -0,0 +1,21 @@ +import { AnalyticsInterval } from 'data/analytics/constants' + +export type Granularity = 'minute' | 'hour' | 'day' | 'week' +export function analyticsIntervalToGranularity(interval: AnalyticsInterval): Granularity { + switch (interval) { + case '1m': + return 'minute' + case '5m': + return 'minute' + case '10m': + return 'minute' + case '30m': + return 'minute' + case '1h': + return 'hour' + case '1d': + return 'day' + default: + return 'hour' + } +} diff --git a/apps/studio/hooks/useChartData.ts b/apps/studio/hooks/useChartData.ts new file mode 100644 index 0000000000000..a413d7ee72684 --- /dev/null +++ b/apps/studio/hooks/useChartData.ts @@ -0,0 +1,188 @@ +/** + * useChartData + * + * A hook for fetching and processing data for a chart. + * This hook is responsible for all the data fetching, combining, and state management logic + * that was previously inside ComposedChartHandler. + * + * It takes all necessary parameters like project reference, date range, and attributes, + * and returns the final chart data, loading state, and derived attributes. 
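
A rough usage sketch for this hook, not a call site from this PR. It has to run inside a component rendered under a project route (the hook reads the project ref from the router), and the attribute fields shown are assumptions modelled on how `MultiAttribute` entries are read elsewhere in this diff:

// Hypothetical consumer of useChartData (sketch only)
const { data, isLoading, chartAttributes, highlightedValue } = useChartData({
  attributes: [
    // provider 'logs' routes this attribute through useAuthLogsReport
    { attribute: 'ActiveUsers', provider: 'logs', label: 'Active Users', enabled: true },
  ],
  startDate: '2025-05-01T00:00:00.000Z',
  endDate: '2025-05-02T00:00:00.000Z',
  interval: '1h',
})
// data: combined points keyed by period_start, ready for the chart component
// chartAttributes: logs-derived attributes merged with any non-logs ones
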
+ */ +import { useMemo } from 'react' +import { useRouter } from 'next/router' + +import { useDatabaseSelectorStateSnapshot } from 'state/database-selector' +import type { AnalyticsInterval, DataPoint } from 'data/analytics/constants' +import { useAuthLogsReport } from 'data/reports/auth-report-query' +import type { ChartData } from 'components/ui/Charts/Charts.types' +import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils' +import { useAttributeQueries } from 'components/ui/Charts/ComposedChartHandler' + +export const useChartData = ({ + attributes, + startDate, + endDate, + interval, + data, + highlightedValue, +}: { + attributes: MultiAttribute[] + startDate: string + endDate: string + interval: string + data?: ChartData + highlightedValue?: string | number +}) => { + const router = useRouter() + const { ref } = router.query + const state = useDatabaseSelectorStateSnapshot() + + const logsAttributes = attributes.filter((attr) => attr.provider === 'logs') + const nonLogsAttributes = attributes.filter((attr) => attr.provider !== 'logs') + + const { + data: logsData, + attributes: logsChartAttributes, + isLoading: isLogsLoading, + } = useAuthLogsReport({ + projectRef: ref as string, + attributes: logsAttributes, + startDate, + endDate, + interval: interval as AnalyticsInterval, + enabled: logsAttributes.length > 0, + }) + + const chartAttributes = useMemo( + () => nonLogsAttributes.concat(logsChartAttributes || []), + [nonLogsAttributes, logsChartAttributes] + ) + + const databaseIdentifier = state.selectedDatabaseId + + // Use the custom hook at the top level of the component + const attributeQueries = useAttributeQueries( + attributes, + ref, + startDate, + endDate, + interval as AnalyticsInterval, + databaseIdentifier, + data, + true + ) + + // Combine all the data into a single dataset + const combinedData = useMemo(() => { + if (data) return data + + const regularAttributeQueries = attributeQueries.filter( + (q) => q.data?.provider !== 'logs' && q.data?.provider !== 'reference-line' + ) + const isLoading = + (logsAttributes.length > 0 && isLogsLoading) || + regularAttributeQueries.some((query: any) => query.isLoading) + if (isLoading) { + return undefined + } + + const hasError = regularAttributeQueries.some((query: any) => !query.data) + if (hasError) { + return undefined + } + + // Get all unique timestamps from all datasets + const timestamps = new Set() + if (logsData) { + logsData.forEach((point: any) => { + if (point?.period_start) { + timestamps.add(point.period_start) + } + }) + } + regularAttributeQueries.forEach((query: any) => { + query.data?.data?.forEach((point: any) => { + if (point?.period_start) { + timestamps.add(point.period_start) + } + }) + }) + + const referenceLineQueries = attributeQueries.filter( + (q) => q.data?.provider === 'reference-line' + ) + + // Combine data points for each timestamp + const combined = Array.from(timestamps) + .sort() + .map((timestamp) => { + const point: any = { period_start: timestamp } + + const logPoint = logsData?.find((p: any) => p.period_start === timestamp) || {} + Object.assign(point, logPoint) + + chartAttributes.forEach((attr) => { + if (!attr) return + if (attr.provider === 'logs') return + if (attr.provider === 'reference-line') return + if (attr.customValue !== undefined) { + point[attr.attribute] = attr.customValue + return + } + + const query = regularAttributeQueries.find((q) => q.data?.attribute === attr.attribute) + const matchingPoint = query?.data?.data?.find((p: any) => p.period_start === 
timestamp) + point[attr.attribute] = matchingPoint?.[attr.attribute] ?? 0 + }) + + // Add reference line values for each timestamp + referenceLineQueries.forEach((query: any) => { + const attr = query.data.attribute + const value = query.data.total + point[attr] = value + }) + + return point as DataPoint + }) + + return combined as DataPoint[] + }, [data, attributeQueries, attributes, chartAttributes, isLogsLoading, logsData, logsAttributes]) + + const loading = + (logsAttributes.length > 0 && isLogsLoading) || + attributeQueries.some((query: any) => query.isLoading) + + // Calculate highlighted value based on the first attribute's data + const _highlightedValue = useMemo(() => { + if (highlightedValue !== undefined) return highlightedValue + + const firstAttr = attributes[0] + const firstQuery = attributeQueries[0] + const firstData = firstQuery?.data + + if (!firstData) return undefined + + const shouldHighlightMaxValue = + firstAttr.provider === 'daily-stats' && + !firstAttr.attribute.includes('ingress') && + !firstAttr.attribute.includes('egress') && + 'maximum' in firstData + + const shouldHighlightTotalGroupedValue = 'totalGrouped' in firstData + + return shouldHighlightMaxValue + ? firstData.maximum + : firstAttr.provider === 'daily-stats' + ? firstData.total + : shouldHighlightTotalGroupedValue + ? firstData.totalGrouped?.[firstAttr.attribute as keyof typeof firstData.totalGrouped] + : (firstData.data?.[firstData.data?.length - 1] as any)?.[firstAttr.attribute] + }, [highlightedValue, attributes, attributeQueries]) + + return { + data: combinedData, + isLoading: loading, + chartAttributes, + highlightedValue: _highlightedValue, + } +} diff --git a/apps/studio/lib/http-status-codes.test.ts b/apps/studio/lib/http-status-codes.test.ts new file mode 100644 index 0000000000000..171f4950fe091 --- /dev/null +++ b/apps/studio/lib/http-status-codes.test.ts @@ -0,0 +1,24 @@ +import { describe, expect, it } from 'vitest' +import { getHttpStatusCodeInfo } from './http-status-codes' + +describe('getHttpStatusCodeInfo', () => { + it('should return the correct status code info', () => { + const statusCodeInfo = getHttpStatusCodeInfo(400) + expect(statusCodeInfo).toEqual({ + code: 400, + name: 'BAD_REQUEST', + message: 'The server cannot or will not process the request due to an apparent client error.', + label: 'Bad Request', + }) + }) + + it('should return unknown for an unknown status code', () => { + const statusCodeInfo = getHttpStatusCodeInfo(999) + expect(statusCodeInfo).toEqual({ + code: 999, + name: 'UNKNOWN', + message: 'Unknown status code', + label: 'Unknown', + }) + }) +}) diff --git a/apps/studio/lib/http-status-codes.ts b/apps/studio/lib/http-status-codes.ts new file mode 100644 index 0000000000000..4e0d2042985d9 --- /dev/null +++ b/apps/studio/lib/http-status-codes.ts @@ -0,0 +1,30 @@ +import { status } from 'http-status' + +export function getHttpStatusCodeInfo(codeNumber: number): { + code: number + name: string + message: string + label: string +} { + type StatusCodeKey = keyof typeof status + + if (!(codeNumber in status)) { + return { + code: codeNumber, + name: 'UNKNOWN', + message: 'Unknown status code', + label: 'Unknown', + } + } + + const statusCodeLabel = status[codeNumber as StatusCodeKey] + const statusCodeMessage = status[`${codeNumber}_MESSAGE` as StatusCodeKey] + const statusCodeName = status[`${codeNumber}_NAME` as StatusCodeKey] + + return { + code: codeNumber, + name: statusCodeName as string, + label: statusCodeLabel as string, + message: statusCodeMessage as 
string, + } +} diff --git a/apps/studio/package.json b/apps/studio/package.json index b81ddd9fbed92..f7e586bacd0f2 100644 --- a/apps/studio/package.json +++ b/apps/studio/package.json @@ -84,6 +84,7 @@ "framer-motion": "^11.11.17", "generate-password-browser": "^1.1.0", "html-to-image": "^1.10.8", + "http-status": "^2.1.0", "icons": "workspace:*", "idb": "^8.0.2", "immutability-helper": "^3.1.1", diff --git a/apps/studio/pages/project/[ref]/reports/auth.tsx b/apps/studio/pages/project/[ref]/reports/auth.tsx new file mode 100644 index 0000000000000..74a0fea6a27f2 --- /dev/null +++ b/apps/studio/pages/project/[ref]/reports/auth.tsx @@ -0,0 +1,166 @@ +import { useState } from 'react' +import { useQueryClient } from '@tanstack/react-query' +import dayjs from 'dayjs' +import { ArrowRight, RefreshCw } from 'lucide-react' +import { useParams } from 'common' + +import ReportHeader from 'components/interfaces/Reports/ReportHeader' +import ReportPadding from 'components/interfaces/Reports/ReportPadding' +import DefaultLayout from 'components/layouts/DefaultLayout' +import ReportsLayout from 'components/layouts/ReportsLayout/ReportsLayout' +import { ButtonTooltip } from 'components/ui/ButtonTooltip' +import { DateRangePicker } from 'components/ui/DateRangePicker' + +import { useCurrentOrgPlan } from 'hooks/misc/useCurrentOrgPlan' +import { TIME_PERIODS_INFRA } from 'lib/constants/metrics' +import { getAuthReportAttributes } from 'data/reports/auth-charts' + +import ReportChart from 'components/interfaces/Reports/ReportChart' +import type { NextPageWithLayout } from 'types' + +const AuthReport: NextPageWithLayout = () => { + return ( + + + + ) +} + +AuthReport.getLayout = (page) => ( + + {page} + +) + +export type UpdateDateRange = (from: string, to: string) => void +export default AuthReport + +const AuthUsage = () => { + const { ref } = useParams() + + const defaultStart = dayjs().subtract(1, 'day').toISOString() + const defaultEnd = dayjs().toISOString() + const [dateRange, setDateRange] = useState({ + period_start: { date: defaultStart, time_period: '1d' }, + period_end: { date: defaultEnd, time_period: 'today' }, + interval: '1h', + }) + + const queryClient = useQueryClient() + const [isRefreshing, setIsRefreshing] = useState(false) + + const { plan: orgPlan, isLoading: isOrgPlanLoading } = useCurrentOrgPlan() + const isFreePlan = !isOrgPlanLoading && orgPlan?.id === 'free' + + const AUTH_REPORT_ATTRIBUTES = getAuthReportAttributes(isFreePlan) + + const onRefreshReport = async () => { + if (!dateRange) return + + setIsRefreshing(true) + AUTH_REPORT_ATTRIBUTES.forEach((attr) => { + attr.attributes.forEach((subAttr) => { + queryClient.invalidateQueries([ + 'auth-metrics', + ref, + subAttr.attribute, + dateRange.period_start.date, + dateRange.period_end.date, + dateRange.interval, + ]) + }) + }) + setTimeout(() => setIsRefreshing(false), 1000) + } + + const handleIntervalGranularity = (from: string, to: string) => { + const conditions = { + '1m': dayjs(to).diff(from, 'hour') < 3, // less than 3 hours + '10m': dayjs(to).diff(from, 'hour') < 6, // less than 6 hours + '30m': dayjs(to).diff(from, 'hour') < 18, // less than 18 hours + '1h': dayjs(to).diff(from, 'day') < 10, // less than 10 days + '1d': dayjs(to).diff(from, 'day') >= 10, // more than 10 days + } + + switch (true) { + case conditions['1m']: + return '1m' + case conditions['10m']: + return '10m' + case conditions['30m']: + return '30m' + default: + return '1h' + } + } + + const updateDateRange: UpdateDateRange = (from: string, to: string) 
=> { + setDateRange({ + period_start: { date: from, time_period: '1d' }, + period_end: { date: to, time_period: 'today' }, + interval: handleIntervalGranularity(from, to), + }) + } + + return ( + <> + +
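
As a rough illustration of the interval thresholds above (hypothetical dates; the helper is only in scope inside this component):

handleIntervalGranularity('2025-05-01T10:00:00Z', '2025-05-01T12:00:00Z') // 2-hour window, diff < 3 hours => '1m'
handleIntervalGranularity('2025-05-01T00:00:00Z', '2025-05-04T00:00:00Z') // 3-day window, no fine-grained condition matches, falls through to '1h'
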
+
+
+ } + className="w-7" + tooltip={{ content: { side: 'bottom', text: 'Refresh report' } }} + onClick={onRefreshReport} + /> +
+ { + if (values.interval === '1d') { + setDateRange({ ...values, interval: '1h' }) + } else { + setDateRange(values) + } + }} + /> + {dateRange && ( +
+

+ {dayjs(dateRange.period_start.date).format('MMM D, h:mma')} +

+

+ +

+

+ {dayjs(dateRange.period_end.date).format('MMM D, h:mma')} +

+
+ )} +
+
+
+ +
+ {dateRange && + AUTH_REPORT_ATTRIBUTES.filter((attr) => !attr.hide).map((attr, i) => ( + + ))} +
+
+ + ) +} diff --git a/apps/studio/pages/project/[ref]/reports/database.tsx b/apps/studio/pages/project/[ref]/reports/database.tsx index 7e88300548970..3a1952fc28aad 100644 --- a/apps/studio/pages/project/[ref]/reports/database.tsx +++ b/apps/studio/pages/project/[ref]/reports/database.tsx @@ -21,7 +21,6 @@ import ChartHandler from 'components/ui/Charts/ChartHandler' import Panel from 'components/ui/Panel' import ShimmerLine from 'components/ui/ShimmerLine' import { useDatabaseSelectorStateSnapshot } from 'state/database-selector' -import ComposedChartHandler from 'components/ui/Charts/ComposedChartHandler' import { DateRangePicker } from 'components/ui/DateRangePicker' import GrafanaPromoBanner from 'components/ui/GrafanaPromoBanner' @@ -37,8 +36,8 @@ import { useSelectedOrganization } from 'hooks/misc/useSelectedOrganization' import { TIME_PERIODS_INFRA } from 'lib/constants/metrics' import { formatBytes } from 'lib/helpers' +import ReportChart from 'components/interfaces/Reports/ReportChart' import type { NextPageWithLayout } from 'types' -import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils' const DatabaseReport: NextPageWithLayout = () => { return ( @@ -208,15 +207,10 @@ const DatabaseUsage = () => { return ( <> -
-
- -
-
-
+
{
{dateRange && REPORT_ATTRIBUTES_V2.filter((chart) => !chart.hide).map((chart) => ( - ))}
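
Taken together, the pieces added in this diff form one pipeline from chart config to rendered data. A condensed sketch of how a single logs-backed auth chart resolves (the config object is illustrative, not an exact entry from auth-charts):

// Assumed shape of one logs-backed chart attribute
const attr = { attribute: 'ErrorsByStatus', provider: 'logs', label: 'Auth errors', enabled: true }

// 1. useChartData splits attributes by provider and hands the 'logs' ones to useAuthLogsReport.
// 2. useAuthLogsReport builds METRIC_SQL['ErrorsByStatus'](interval) and runs it against
//    /platform/projects/{ref}/analytics/endpoints/logs.all.
// 3. METRIC_FORMATTER['ErrorsByStatus'] pivots the rows into one attribute per status code
//    (status_400, status_429, ...) using STATUS_CODE_COLORS for the series colors.
// 4. useChartData merges those points with any non-logs attributes and returns data keyed
//    by period_start, which the report pages pass to the chart component per config entry.
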
diff --git a/packages/api-types/types/api.d.ts b/packages/api-types/types/api.d.ts index 4af05d16429cf..58e32b4292b1e 100644 --- a/packages/api-types/types/api.d.ts +++ b/packages/api-types/types/api.d.ts @@ -1571,7 +1571,7 @@ export interface components { result?: unknown[] } ApiKeyResponse: { - api_key: string + api_key?: string | null description?: string | null hash?: string | null id?: string | null @@ -1878,6 +1878,7 @@ export interface components { /** Format: int64 */ created_at?: number entrypoint_path?: string + ezbr_sha256?: string id: string import_map?: boolean import_map_path?: string @@ -1893,6 +1894,7 @@ export interface components { /** Format: int64 */ created_at: number entrypoint_path?: string + ezbr_sha256?: string id: string import_map?: boolean import_map_path?: string @@ -2025,6 +2027,42 @@ export interface components { CreateSigningKeyBody: { /** @enum {string} */ algorithm: 'EdDSA' | 'ES256' | 'RS256' | 'HS256' + private_jwk?: + | { + d: string + dp: string + dq: string + /** @enum {string} */ + e: 'AQAB' + /** @enum {string} */ + kty: 'RSA' + n: string + p: string + q: string + qi: string + } + | { + /** @enum {string} */ + crv: 'P-256' + d: string + /** @enum {string} */ + kty: 'EC' + x: string + y: string + } + | { + /** @enum {string} */ + crv: 'Ed25519' + d: string + /** @enum {string} */ + kty: 'OKP' + x: string + } + | { + k: string + /** @enum {string} */ + kty: 'oct' + } /** @enum {string} */ status?: 'in_use' | 'standby' } @@ -2122,6 +2160,7 @@ export interface components { /** Format: int64 */ created_at: number entrypoint_path?: string + ezbr_sha256?: string id: string import_map?: boolean import_map_path?: string @@ -2138,6 +2177,7 @@ export interface components { /** Format: int64 */ created_at: number entrypoint_path?: string + ezbr_sha256?: string id: string import_map?: boolean import_map_path?: string @@ -2484,10 +2524,9 @@ export interface components { | 'preview' duration_estimate_hours: number eligible: boolean - extension_dependent_objects: string[] latest_app_version: string legacy_auth_custom_roles: string[] - potential_breaking_changes: string[] + objects_to_be_dropped: string[] target_upgrade_versions: { app_version: string /** @enum {string} */ @@ -2495,6 +2534,8 @@ export interface components { /** @enum {string} */ release_channel: 'internal' | 'alpha' | 'beta' | 'ga' | 'withdrawn' | 'preview' }[] + unsupported_extensions: string[] + user_defined_objects_in_internal_schemas: string[] } ProjectUpgradeInitiateResponse: { tracking_id: string @@ -6118,6 +6159,7 @@ export interface operations { parameters: { query?: { entrypoint_path?: string + ezbr_sha256?: string /** @description Boolean string, true or false */ import_map?: boolean import_map_path?: string @@ -6239,6 +6281,7 @@ export interface operations { parameters: { query?: { entrypoint_path?: string + ezbr_sha256?: string /** @description Boolean string, true or false */ import_map?: boolean import_map_path?: string diff --git a/packages/api-types/types/platform.d.ts b/packages/api-types/types/platform.d.ts index 208765a7f5e26..28f0f1e6e7817 100644 --- a/packages/api-types/types/platform.d.ts +++ b/packages/api-types/types/platform.d.ts @@ -622,6 +622,62 @@ export interface paths { patch?: never trace?: never } + '/platform/integrations/github/repositories/{repositoryId}/branches': { + parameters: { + query?: never + header?: never + path?: never + cookie?: never + } + /** List GitHub repository branches */ + get: operations['GitHubRepositoriesController_listRepositoryBranches'] + 
put?: never + post?: never + delete?: never + options?: never + head?: never + patch?: never + trace?: never + } + '/platform/integrations/github/repositories/{repositoryId}/branches/{branchName}': { + parameters: { + query?: never + header?: never + path?: never + cookie?: never + } + /** + * Get GitHub repository branch + * @deprecated + * @description This is a temporary endpoint before dashboard switches to combo box + */ + get: operations['GitHubRepositoriesController_getRepository'] + put?: never + post?: never + delete?: never + options?: never + head?: never + patch?: never + trace?: never + } + '/platform/integrations/private-link/{organization_slug}': { + parameters: { + query?: never + header?: never + path?: never + cookie?: never + } + /** Get organization's PrivateLink configuration. */ + get: operations['PrivateLinkController_getPrivateLinkConfig'] + /** Update organization's PrivateLink configuration. */ + put: operations['PrivateLinkController_updatePrivateLinkConfig'] + post?: never + delete?: never + options?: never + head?: never + patch?: never + trace?: never + } '/platform/integrations/vercel': { parameters: { query?: never @@ -1479,23 +1535,6 @@ export interface paths { patch?: never trace?: never } - '/platform/organizations/{slug}/transfer': { - parameters: { - query?: never - header?: never - path?: never - cookie?: never - } - get?: never - put?: never - /** Transfers the organization to the given member */ - post: operations['TransferController_transferOrganization'] - delete?: never - options?: never - head?: never - patch?: never - trace?: never - } '/platform/organizations/{slug}/usage': { parameters: { query?: never @@ -2104,10 +2143,10 @@ export interface paths { cookie?: never } /** Gets project's logs */ - get: operations['LogsController_getApiPaths'] + get: operations['LogsController_getProjectLogsViaGet'] put?: never - /** Post project's logs */ - post: operations['LogsController_postApiPaths'] + /** Gets project's logs */ + post: operations['LogsController_getProjectLogsViaPost'] delete?: never options?: never head?: never @@ -4422,10 +4461,24 @@ export interface components { CreateGitHubAuthorizationBody: { code: string } - CreateGitHubConnectionsBody: { + CreateGitHubConnectionBody: { + branch_limit?: number installation_id: number + new_branch_per_pr?: boolean project_ref: string repository_id: number + supabase_changes_only?: boolean + workdir?: string + } + CreateGitHubConnectionResponse: { + branch_limit: number + id: number + inserted_at: string + installation_id: number + new_branch_per_pr: boolean + supabase_changes_only: boolean + updated_at: string + workdir: string } CreateInvitationBody: { email: string @@ -4649,8 +4702,7 @@ export interface components { | '48xlarge_optimized_cpu' | '48xlarge_high_memory' name: string - org_id?: number - organization_slug?: string + organization_slug: string /** * @description Postgres engine version. If not provided, the latest version will be used. 
* @enum {string} @@ -5352,6 +5404,11 @@ export interface components { remediation: string title: string }[] + GetProjectLogsBody: { + iso_timestamp_end?: string + iso_timestamp_start?: string + sql?: string + } GetPublicUrlBody: { options?: { download?: boolean @@ -5617,6 +5674,9 @@ export interface components { sender_id: number user_id: number } + GitHubBranchResponse: { + name: string + } GoTrueConfigResponse: { API_MAX_REQUEST_DURATION: number | null DB_MAX_POOL_SIZE: number | null @@ -5939,6 +5999,7 @@ export interface components { id: number inserted_at: string installation_id: number + new_branch_per_pr: boolean project: { id: number name: string @@ -5958,6 +6019,14 @@ export interface components { workdir: string }[] } + ListGitHubRepositoriesResponse: { + repositories: { + default_branch: string + id: number + installation_id: number + name: string + }[] + } ListNotificationExceptionsResponse: { exceptions: { /** Format: uuid */ @@ -5991,6 +6060,11 @@ export interface components { oauth_app_id: string }[] } + ListRepositoryBranchesResponse: { + branches: { + name: string + }[] + } LoadBalancerDetailResponse: { databases: { identifier: string @@ -6705,9 +6779,6 @@ export interface components { name: string schema: string } - PostProjectLogsBody: { - sql: string - } PreviewProjectTransferResponse: { errors: { key: string @@ -6736,6 +6807,12 @@ export interface components { message: string }[] } + PrivateLinkResponse: { + appliedSuccessfully: boolean + currentConfig: { + enabled: boolean + } + } Profile: { first_name: string gotrue_id: string @@ -7510,12 +7587,6 @@ export interface components { page_url: string pathname: string } - TransferOrganizationBody: { - /** Format: uuid */ - member_gotrue_id: string - member_id?: number - org_id: number - } TransferProjectBody: { target_organization_slug: string } @@ -7818,6 +7889,7 @@ export interface components { } UpdateGitHubConnectionBody: { branch_limit?: number + new_branch_per_pr?: boolean supabase_changes_only?: boolean workdir?: string } @@ -8151,6 +8223,11 @@ export interface components { db_schema: string max_rows: number } + UpdatePrivateLinkBody: { + requestedConfig: { + enabled: boolean + } + } UpdateProfileBody: { first_name?: string last_name?: string @@ -9637,7 +9714,9 @@ export interface operations { headers: { [name: string]: unknown } - content?: never + content: { + 'application/json': components['schemas']['GitHubBranchResponse'][] + } } /** @description Failed to list GitHub connection branches */ 500: { @@ -9664,7 +9743,9 @@ export interface operations { headers: { [name: string]: unknown } - content?: never + content: { + 'application/json': components['schemas']['GitHubBranchResponse'] + } } /** @description Failed to get GitHub connection branch */ 500: { @@ -9712,7 +9793,7 @@ export interface operations { } requestBody: { content: { - 'application/json': components['schemas']['CreateGitHubConnectionsBody'] + 'application/json': components['schemas']['CreateGitHubConnectionBody'] } } responses: { @@ -9720,7 +9801,9 @@ export interface operations { headers: { [name: string]: unknown } - content?: never + content: { + 'application/json': components['schemas']['CreateGitHubConnectionResponse'] + } } /** @description Failed to create project connections */ 500: { @@ -9800,7 +9883,9 @@ export interface operations { headers: { [name: string]: unknown } - content?: never + content: { + 'application/json': components['schemas']['ListGitHubRepositoriesResponse'] + } } /** @description Failed to get GitHub repositories for 
user */ 500: { @@ -9811,6 +9896,123 @@ export interface operations { } } } + GitHubRepositoriesController_listRepositoryBranches: { + parameters: { + query?: never + header?: never + path: { + repositoryId: number + } + cookie?: never + } + requestBody?: never + responses: { + 200: { + headers: { + [name: string]: unknown + } + content: { + 'application/json': components['schemas']['ListRepositoryBranchesResponse'] + } + } + /** @description Failed to list GitHub repository branches */ + 500: { + headers: { + [name: string]: unknown + } + content?: never + } + } + } + GitHubRepositoriesController_getRepository: { + parameters: { + query?: never + header?: never + path: { + branchName: string + repositoryId: number + } + cookie?: never + } + requestBody?: never + responses: { + 200: { + headers: { + [name: string]: unknown + } + content: { + 'application/json': components['schemas']['GitHubBranchResponse'] + } + } + /** @description Failed to get GitHub repository branch */ + 500: { + headers: { + [name: string]: unknown + } + content?: never + } + } + } + PrivateLinkController_getPrivateLinkConfig: { + parameters: { + query?: never + header?: never + path: { + organization_slug: string + } + cookie?: never + } + requestBody?: never + responses: { + 200: { + headers: { + [name: string]: unknown + } + content: { + 'application/json': components['schemas']['PrivateLinkResponse'] + } + } + /** @description Failed to retrieve organization's PrivateLink config */ + 500: { + headers: { + [name: string]: unknown + } + content?: never + } + } + } + PrivateLinkController_updatePrivateLinkConfig: { + parameters: { + query?: never + header?: never + path: { + organization_slug: string + } + cookie?: never + } + requestBody: { + content: { + 'application/json': components['schemas']['UpdatePrivateLinkBody'] + } + } + responses: { + 200: { + headers: { + [name: string]: unknown + } + content: { + 'application/json': components['schemas']['PrivateLinkResponse'] + } + } + /** @description Failed to update organization's PrivateLink configuration. 
*/ + 500: { + headers: { + [name: string]: unknown + } + content?: never + } + } + } VercelIntegrationController_createVercelIntegration: { parameters: { query?: never @@ -12051,34 +12253,6 @@ export interface operations { } } } - TransferController_transferOrganization: { - parameters: { - query?: never - header?: never - path?: never - cookie?: never - } - requestBody: { - content: { - 'application/json': components['schemas']['TransferOrganizationBody'] - } - } - responses: { - 201: { - headers: { - [name: string]: unknown - } - content?: never - } - /** @description Failed to update owner */ - 500: { - headers: { - [name: string]: unknown - } - content?: never - } - } - } OrgUsageController_getOrgUsage: { parameters: { query?: { @@ -14442,12 +14616,11 @@ export interface operations { } } } - LogsController_getApiPaths: { + LogsController_getProjectLogsViaGet: { parameters: { query?: { iso_timestamp_end?: string iso_timestamp_start?: string - project?: string sql?: string } header?: never @@ -14482,14 +14655,9 @@ export interface operations { } } } - LogsController_postApiPaths: { + LogsController_getProjectLogsViaPost: { parameters: { - query?: { - iso_timestamp_end?: string - iso_timestamp_start?: string - project?: string - sql?: string - } + query?: never header?: never path: { /** @description Project ref */ @@ -14499,7 +14667,7 @@ export interface operations { } requestBody: { content: { - 'application/json': components['schemas']['PostProjectLogsBody'] + 'application/json': components['schemas']['GetProjectLogsBody'] } } responses: { @@ -14517,7 +14685,7 @@ export interface operations { } content?: never } - /** @description Failed to POST project's logs */ + /** @description Failed to get project's logs */ 500: { headers: { [name: string]: unknown diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5968c9d1f35fa..f3ac524c098f1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -331,7 +331,7 @@ importers: version: 1.0.3(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@sentry/nextjs': specifier: ^9.15.0 - version: 9.15.0(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0)(supports-color@8.1.1))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0) + version: 9.15.0(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0)(supports-color@8.1.1))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0) '@supabase/supabase-js': specifier: 'catalog:' version: 2.49.3 @@ -451,7 +451,7 @@ importers: version: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) nuqs: specifier: ^1.19.1 - version: 1.19.1(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4)) + version: 
1.19.1(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4)) openai: specifier: ^4.20.1 version: 4.71.1(encoding@0.1.13)(zod@3.23.8) @@ -828,6 +828,9 @@ importers: html-to-image: specifier: ^1.10.8 version: 1.11.11 + http-status: + specifier: ^2.1.0 + version: 2.1.0 icons: specifier: workspace:* version: link:../../packages/icons @@ -1131,7 +1134,7 @@ importers: version: 2.4.11(typescript@5.5.2) next-router-mock: specifier: ^0.9.13 - version: 0.9.13(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) + version: 0.9.13(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) postcss: specifier: ^8.5.3 version: 8.5.3 @@ -2287,7 +2290,7 @@ importers: version: link:../api-types next-router-mock: specifier: ^0.9.13 - version: 0.9.13(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) + version: 0.9.13(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) tsx: specifier: ^4.19.3 version: 4.19.3 @@ -11032,7 +11035,6 @@ packages: resolution: {integrity: sha512-t0q23FIpvHDTtnORW+bDJziGsal5uh9RJTJ1fyH8drd4lICOoXhJ5pLMUZ5C0VQei6dNmwTzzoTRgMkO9JgHEQ==} peerDependencies: eslint: '>= 5' - bundledDependencies: [] eslint-plugin-import@2.31.0: resolution: {integrity: sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==} @@ -25381,7 +25383,7 @@ snapshots: - supports-color - webpack - '@sentry/nextjs@9.15.0(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0)(supports-color@8.1.1))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0)': + '@sentry/nextjs@9.15.0(@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.0)(supports-color@8.1.1))(@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.32.0 @@ -34321,7 +34323,7 @@ snapshots: dependencies: js-yaml-loader: 1.2.2 - next-router-mock@0.9.13(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1): + next-router-mock@0.9.13(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1): dependencies: next: 
15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(babel-plugin-macros@3.1.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) react: 18.3.1 @@ -34658,7 +34660,7 @@ snapshots: number-flow@0.3.7: {} - nuqs@1.19.1(next@15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4)): + nuqs@1.19.1(next@15.3.1(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4)): dependencies: mitt: 3.0.1 next: 15.3.1(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.52.0)(babel-plugin-macros@3.1.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4)