[
- {
- id: 'execution-status-codes',
- label: 'Edge Function Status Codes',
- valuePrecision: 0,
- hide: false,
- showTooltip: true,
- showLegend: true,
- showMaxValue: false,
- hideChartType: false,
- defaultChartStyle: 'bar',
- titleTooltip: 'The total number of edge function executions by status code.',
- availableIn: ['free', 'pro', 'team', 'enterprise'],
- attributes: [
- {
- attribute: 'ExecutionStatusCodes',
- provider: 'logs',
- label: 'Execution Status Codes',
- },
- ],
- },
- {
- id: 'execution-time',
- label: 'Edge Function Execution Time',
- valuePrecision: 0,
- hide: false,
- showTooltip: true,
- showLegend: true,
- showMaxValue: false,
- hideChartType: false,
- defaultChartStyle: 'line',
- titleTooltip: 'Average execution time for edge functions.',
- availableIn: ['free', 'pro', 'team', 'enterprise'],
- format: 'ms',
- YAxisProps: {
- width: 50,
- tickFormatter: (value: number) => `${value}ms`,
- },
- attributes: [
- {
- attribute: 'ExecutionTime',
- label: 'Avg. Execution Time (ms)',
- provider: 'logs',
- enabled: true,
- },
- ],
- },
- {
- id: 'invocations-by-region',
- label: 'Edge Function Invocations by Region',
- valuePrecision: 0,
- hide: false,
- showTooltip: true,
- showLegend: true,
- showMaxValue: false,
- hideChartType: false,
- defaultChartStyle: 'bar',
- titleTooltip: 'The total number of edge function invocations by region.',
- availableIn: ['pro', 'team', 'enterprise'],
- attributes: [
- {
- attribute: 'InvocationsByRegion',
- provider: 'logs',
- label: 'Invocations by Region',
- },
- ],
- },
-]
diff --git a/apps/studio/data/reports/edgefn-query.ts b/apps/studio/data/reports/edgefn-query.ts
deleted file mode 100644
index 1671f7e4ec411..0000000000000
--- a/apps/studio/data/reports/edgefn-query.ts
+++ /dev/null
@@ -1,415 +0,0 @@
-import { useQuery } from '@tanstack/react-query'
-import { get } from 'data/fetchers'
-import { AnalyticsInterval } from 'data/analytics/constants'
-import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils'
-import { getHttpStatusCodeInfo } from 'lib/http-status-codes'
-import { analyticsIntervalToGranularity, useEdgeFnIdToName } from './report.utils'
-import { REPORT_STATUS_CODE_COLORS } from './report.utils'
-
-/**
- * METRICS
- * Each chart in the UI has a corresponding metric key.
- */
-
-const METRIC_KEYS = [
- 'TotalInvocations',
- 'ExecutionStatusCodes',
- 'InvocationsByRegion',
- 'ExecutionTime',
-]
-
-type MetricKey = (typeof METRIC_KEYS)[number]
-
-/**
- * SQL
- * Each metric has a corresponding SQL query.
- */
-
-const METRIC_SQL: Record<
- MetricKey,
- (interval: AnalyticsInterval, functionIds?: string[]) => string
-> = {
- TotalInvocations: (interval, functionIds) => {
- return `
---edgefn-report-invocations
-select
- timestamp_trunc(timestamp, ${analyticsIntervalToGranularity(interval)}) as timestamp,
- function_id,
- count(*) as count
-from
- function_edge_logs
- CROSS JOIN UNNEST(metadata) AS m
- CROSS JOIN UNNEST(m.request) AS request
- CROSS JOIN UNNEST(m.response) AS response
- ${
- functionIds && functionIds.length > 0
- ? `WHERE function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})`
- : ''
- }
-group by
- timestamp,
- function_id
-order by
- timestamp desc;
-
- `
- },
- ExecutionStatusCodes: (interval, functionIds) => {
- return `
---edgefn-report-execution-status-codes
-select
- timestamp_trunc(timestamp, ${analyticsIntervalToGranularity(interval)}) as timestamp,
- response.status_code AS status_code,
- COUNT(*) AS count
-FROM
- function_edge_logs
- CROSS JOIN UNNEST(metadata) AS m
- CROSS JOIN UNNEST(m.response) AS response
- CROSS JOIN UNNEST(m.request) AS request
- ${
- functionIds && functionIds.length > 0
- ? `WHERE function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})`
- : ''
- }
-group by
- timestamp,
- status_code
-order by
- timestamp desc;
- `
- },
- InvocationsByRegion: (interval, functionIds) => {
- const granularity = analyticsIntervalToGranularity(interval)
- return `
---edgefn-report-invocations-by-region
-select
- timestamp_trunc(timestamp, ${granularity}) as timestamp,
- h.x_sb_edge_region as region,
- count(*) as count
-from
- function_edge_logs
- cross join unnest(metadata) as m
- cross join unnest(m.response) as r
- cross join unnest(r.headers) as h
- where h.x_sb_edge_region is not null
- ${
- functionIds && functionIds.length > 0
- ? `and function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})`
- : ''
- }
-group by
- timestamp,
- region
-order by
- timestamp desc
- `
- },
- ExecutionTime: (interval, functionIds) => {
- const granularity = analyticsIntervalToGranularity(interval)
- const hasFunctions = functionIds && functionIds.length > 0
- return `
---edgefn-report-execution-time
-select
- timestamp_trunc(timestamp, ${granularity}) as timestamp,
- ${hasFunctions ? 'function_id,' : ''}
- avg(m.execution_time_ms) as avg_execution_time
-from
- function_edge_logs
- cross join unnest(metadata) as m
- cross join unnest(m.request) as request
- ${hasFunctions ? `where function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})` : ''}
-group by
- timestamp
- ${hasFunctions ? ', function_id' : ''}
-order by
- timestamp desc
- `
- },
-}
-
-/**
- * FORMATTERS.
- * Metrics need to be formatted before being passed on to the UI charts.
- */
-
-function defaultFormatter(rawData: any, attributes: MultiAttribute[]) {
- const chartAttributes = attributes
- if (!rawData) return { data: undefined, chartAttributes }
- const result = rawData.result || []
- const timestamps = new Set(result.map((p: any) => p.timestamp))
- const data = Array.from(timestamps)
- .sort()
- .map((timestamp) => {
- const point: any = { period_start: timestamp }
- chartAttributes.forEach((attr) => {
- point[attr.attribute] = 0
- })
- const matchingPoints = result.filter((p: any) => p.timestamp === timestamp)
- matchingPoints.forEach((p: any) => {
- point[attributes[0].attribute] = p.count
- })
- return point
- })
- return { data, chartAttributes }
-}
-
-const METRIC_FORMATTER: Record<
- MetricKey,
- (
- rawData: any,
- attributes: MultiAttribute[],
- logsMetric: string,
- functionIds?: string[],
- edgeFnIdToName?: (id: string) => string | undefined
- ) => { data: any; chartAttributes: any }
-> = {
- TotalInvocations: (rawData, attributes, logsMetric, functionIds, edgeFnIdToName) => {
- // Always use dynamic attributes, so the chart can show per-function stats.
- if (!rawData) return { data: undefined, chartAttributes: attributes }
- const result = rawData.result || []
-
- const functionIdsInData = Array.from(new Set(result.map((p: any) => p.function_id))) as string[]
-
- const chartFunctionIds = functionIds && functionIds.length > 0 ? functionIds : functionIdsInData
-
- if (chartFunctionIds.length === 0) {
- return { data: [], chartAttributes: [] } // No data, empty chart
- }
-
- const chartAttributes = chartFunctionIds.map((id: string) => ({
- attribute: id,
- label: edgeFnIdToName?.(id) ?? id,
- provider: 'logs',
- enabled: true,
- }))
-
- const timestamps = new Set(result.map((p: any) => p.timestamp))
- const data = Array.from(timestamps)
- .sort()
- .map((timestamp) => {
- const point: any = { period_start: timestamp }
- chartAttributes.forEach((attr) => {
- point[attr.attribute] = 0
- })
- const matchingPoints = result.filter((p: any) => p.timestamp === timestamp)
- matchingPoints.forEach((p: any) => {
- point[p.function_id as string] = p.count
- })
- return point
- })
-
- return { data, chartAttributes }
- },
- ExecutionStatusCodes: (rawData, attributes) => {
- if (!rawData) return { data: undefined, chartAttributes: attributes }
- const result = rawData.result || []
-
- const statusCodes = Array.from(new Set(result.map((p: any) => p.status_code)))
-
- const chartAttributes = statusCodes.map((statusCode) => {
- const statusCodeInfo = getHttpStatusCodeInfo(Number(statusCode))
- const color =
- REPORT_STATUS_CODE_COLORS[String(statusCode)] || REPORT_STATUS_CODE_COLORS.default
-
- return {
- attribute: `status_${statusCode}`,
- label: `${statusCode} ${statusCodeInfo.label}`,
- provider: 'logs',
- enabled: true,
- color: color,
- statusCode: String(statusCode),
- }
- })
-
- const timestamps = new Set(result.map((p: any) => p.timestamp))
- const data = Array.from(timestamps)
- .sort()
- .map((timestamp) => {
- const point: any = { period_start: timestamp }
- chartAttributes.forEach((attr) => {
- point[attr.attribute] = 0
- })
- const matchingPoints = result.filter((p: any) => p.timestamp === timestamp)
- matchingPoints.forEach((p: any) => {
- point[`status_${p.status_code}`] = p.count
- })
- return point
- })
-
- return { data, chartAttributes }
- },
- InvocationsByRegion: (rawData, attributes) => {
- if (!rawData) return { data: undefined, chartAttributes: attributes }
- const result = rawData.result || []
-
- const regions = Array.from(new Set(result.map((p: any) => p.region))).filter(Boolean)
-
- if (regions.length === 0) {
- return { data: [], chartAttributes: [] } // No data, empty chart
- }
-
- const chartAttributes = regions.map((region) => {
- return {
- attribute: region,
- label: region,
- provider: 'logs',
- enabled: true,
- }
- })
-
- const timestamps = new Set(result.map((p: any) => p.timestamp))
- const data = Array.from(timestamps)
- .sort()
- .map((timestamp) => {
- const point: any = { period_start: timestamp }
- chartAttributes.forEach((attr) => {
- point[attr.attribute as string] = 0
- })
- const matchingPoints = result.filter((p: any) => p.timestamp === timestamp)
- matchingPoints.forEach((p: any) => {
- point[p.region] = p.count
- })
- return point
- })
-
- return { data, chartAttributes }
- },
- ExecutionTime: (rawData, attributes, logsMetric, functionIds, edgeFnIdToName) => {
- if (!rawData) return { data: undefined, chartAttributes: attributes }
- const result = rawData.result || []
- const hasFunctions = functionIds && functionIds.length > 0
-
- if (hasFunctions) {
- const chartAttributes = functionIds.map((id: string) => ({
- attribute: id,
- label: edgeFnIdToName?.(id) ?? id,
- provider: 'logs',
- enabled: true,
- }))
-
- if (result.length === 0) {
- return { data: [], chartAttributes }
- }
-
- const timestamps = new Set(result.map((p: any) => p.timestamp))
- const data = Array.from(timestamps)
- .sort()
- .map((timestamp) => {
- const point: any = { period_start: timestamp }
- chartAttributes.forEach((attr) => {
- point[attr.attribute] = 0
- })
- const matchingPoints = result.filter((p: any) => p.timestamp === timestamp)
- matchingPoints.forEach((p: any) => {
- point[p.function_id as string] = p.avg_execution_time
- })
- return point
- })
-
- return { data, chartAttributes }
- } else {
- const chartAttributes = [
- {
- attribute: 'avg_execution_time',
- label: 'Avg. execution time (ms)',
- provider: 'logs',
- enabled: true,
- },
- ]
-
- const data = result
- .map((p: any) => ({
- period_start: p.timestamp,
- avg_execution_time: p.avg_execution_time,
- }))
- .sort(
- (a: { period_start: string }, b: { period_start: string }) =>
- new Date(a.period_start).getTime() - new Date(b.period_start).getTime()
- )
-
- return { data, chartAttributes }
- }
- },
-}
-
-/**
- * REPORT QUERY.
- * Fetching and state management for the report.
- */
-
-export function useEdgeFunctionReport({
- projectRef,
- attributes,
- startDate,
- endDate,
- interval,
- enabled = true,
- functionIds,
-}: {
- projectRef: string
- attributes: MultiAttribute[]
- startDate: string
- endDate: string
- interval: AnalyticsInterval
- enabled?: boolean
- functionIds?: string[]
-}) {
- const logsMetric = attributes.length > 0 ? attributes[0].attribute : ''
- const { edgeFnIdToName } = useEdgeFnIdToName({ projectRef })
- const isEdgeFnMetric = METRIC_KEYS.includes(logsMetric)
-
- const sql = isEdgeFnMetric ? METRIC_SQL[logsMetric as MetricKey](interval, functionIds) : ''
-
- const {
- data: rawData,
- error,
- isLoading,
- } = useQuery(
- [
- 'edge-function-report',
- projectRef,
- logsMetric,
- startDate,
- endDate,
- interval,
- sql,
- functionIds,
- ],
- async () => {
- const { data, error } = await get(`/platform/projects/{ref}/analytics/endpoints/logs.all`, {
- params: {
- path: { ref: projectRef },
- query: {
- sql,
- iso_timestamp_start: startDate,
- iso_timestamp_end: endDate,
- },
- },
- })
- if (error) throw error
- return data
- },
- {
- enabled: Boolean(projectRef && sql && enabled && isEdgeFnMetric),
- refetchOnWindowFocus: false,
- }
- )
-
- // Use formatter if available
- const formatter =
- (isEdgeFnMetric ? METRIC_FORMATTER[logsMetric as MetricKey] : undefined) || defaultFormatter
- const { data, chartAttributes } = formatter(
- rawData,
- attributes,
- logsMetric,
- functionIds,
- edgeFnIdToName
- )
-
- return {
- data,
- attributes: chartAttributes,
- isLoading,
- error,
- }
-}
diff --git a/apps/studio/data/reports/v2/edge-functions.config.ts b/apps/studio/data/reports/v2/edge-functions.config.ts
new file mode 100644
index 0000000000000..2e65c9252326a
--- /dev/null
+++ b/apps/studio/data/reports/v2/edge-functions.config.ts
@@ -0,0 +1,395 @@
+import dayjs from 'dayjs'
+
+import {
+ isUnixMicro,
+ unixMicroToIsoTimestamp,
+} from 'components/interfaces/Settings/Logs/Logs.utils'
+import type { AnalyticsInterval } from 'data/analytics/constants'
+import { get } from 'data/fetchers'
+import {
+ analyticsIntervalToGranularity,
+ REPORT_STATUS_CODE_COLORS,
+} from 'data/reports/report.utils'
+import { getHttpStatusCodeInfo } from 'lib/http-status-codes'
+import { ReportConfig } from './reports.types'
+
+const METRIC_SQL: Record<string, (interval: AnalyticsInterval, functionIds?: string[]) => string> =
+  {
+ TotalInvocations: (interval, functionIds) => {
+ return `
+--edgefn-report-invocations
+select
+ timestamp_trunc(timestamp, ${analyticsIntervalToGranularity(interval)}) as timestamp,
+ function_id,
+ count(*) as count
+from
+ function_edge_logs
+ CROSS JOIN UNNEST(metadata) AS m
+ CROSS JOIN UNNEST(m.request) AS request
+ CROSS JOIN UNNEST(m.response) AS response
+ ${
+ functionIds && functionIds.length > 0
+ ? `WHERE function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})`
+ : ''
+ }
+group by
+ timestamp,
+ function_id
+order by
+ timestamp desc;
+`
+ },
+ ExecutionStatusCodes: (interval, functionIds) => {
+ return `
+--edgefn-report-execution-status-codes
+select
+ timestamp_trunc(timestamp, ${analyticsIntervalToGranularity(interval)}) as timestamp,
+ response.status_code as status_code,
+ count(response.status_code) as count
+from
+ function_edge_logs
+ cross join unnest(metadata) as m
+ cross join unnest(m.response) as response
+ ${
+ functionIds && functionIds.length > 0
+ ? `where function_id in (${functionIds.map((id) => `'${id}'`).join(',')})`
+ : ''
+ }
+group by
+ timestamp,
+ status_code
+order by
+ timestamp desc
+`
+ },
+ InvocationsByRegion: (interval, functionIds) => {
+ const granularity = analyticsIntervalToGranularity(interval)
+ return `
+--edgefn-report-invocations-by-region
+select
+ timestamp_trunc(timestamp, ${granularity}) as timestamp,
+ h.x_sb_edge_region as region,
+ count(*) as count
+from
+ function_edge_logs
+ cross join unnest(metadata) as m
+ cross join unnest(m.response) as r
+ cross join unnest(r.headers) as h
+ where h.x_sb_edge_region is not null
+ ${
+ functionIds && functionIds.length > 0
+ ? `and function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})`
+ : ''
+ }
+group by
+ timestamp,
+ region
+order by
+ timestamp desc
+`
+ },
+ ExecutionTime: (interval, functionIds) => {
+ const granularity = analyticsIntervalToGranularity(interval)
+ const hasFunctions = functionIds && functionIds.length > 0
+ return `
+--edgefn-report-execution-time
+select
+ timestamp_trunc(timestamp, ${granularity}) as timestamp,
+ ${hasFunctions ? 'function_id,' : ''}
+ avg(m.execution_time_ms) as avg_execution_time
+from
+ function_edge_logs
+ cross join unnest(metadata) as m
+ cross join unnest(m.request) as request
+ ${hasFunctions ? `where function_id IN (${functionIds.map((id) => `'${id}'`).join(',')})` : ''}
+group by
+ timestamp
+ ${hasFunctions ? ', function_id' : ''}
+order by
+ timestamp desc
+`
+ },
+ }
+
+async function runQuery(projectRef: string, sql: string, startDate: string, endDate: string) {
+ const { data, error } = await get(`/platform/projects/{ref}/analytics/endpoints/logs.all`, {
+ params: {
+ path: { ref: projectRef },
+ query: {
+ sql,
+ iso_timestamp_start: startDate,
+ iso_timestamp_end: endDate,
+ },
+ },
+ })
+ if (error) throw error
+ return data
+}
+
+export function extractStatusCodesFromData(data: any[]): string[] {
+  const statusCodes = new Set<string>()
+
+ data.forEach((item: any) => {
+ if (item.status_code) {
+ statusCodes.add(String(item.status_code))
+ }
+ })
+
+ return Array.from(statusCodes).sort()
+}
+
+export function generateStatusCodeAttributes(statusCodes: string[]) {
+ return statusCodes.map((code) => ({
+ attribute: code,
+ label: `${code} ${getHttpStatusCodeInfo(parseInt(code)).label}`,
+ color: REPORT_STATUS_CODE_COLORS[code] || REPORT_STATUS_CODE_COLORS.default,
+ }))
+}
+
+/**
+ * Converts a list of { timestamp, status_code, count }
+ * to a list of { timestamp, [status_code]: count }
+ * That we can pass to the chart for rendering
+ */
+export function transformStatusCodeData(data: any[], statusCodes: string[]) {
+  const pivotedData = data.reduce((acc: Record<string, any>, d: any) => {
+ const timestamp = isUnixMicro(d.timestamp)
+ ? unixMicroToIsoTimestamp(d.timestamp)
+ : dayjs.utc(d.timestamp).toISOString()
+ if (!acc[timestamp]) {
+ acc[timestamp] = { timestamp }
+ statusCodes.forEach((code) => {
+ acc[timestamp][code] = 0
+ })
+ }
+ acc[timestamp][d.status_code] = d.count
+ return acc
+ }, {})
+
+ return Object.values(pivotedData)
+}
+
+/**
+ * Transforms raw invocation data by normalizing timestamps and adding function names
+ * @param data - Raw data from the database
+ * @param functions - Array of function objects with id and name
+ * @returns Transformed data with normalized timestamps and function names
+ */
+export function transformInvocationData(data: any[], functions: { id: string; name: string }[]) {
+ return data.map((log: any) => ({
+ ...log,
+ timestamp: isUnixMicro(log.timestamp)
+ ? unixMicroToIsoTimestamp(log.timestamp)
+ : dayjs.utc(log.timestamp).toISOString(),
+ function_name: functions.find((f) => f.id === log.function_id)?.name ?? log.function_id,
+ }))
+}
+
+/**
+ * Aggregates invocation data by timestamp, summing counts for each timestamp
+ * @param data - Transformed invocation data
+ * @returns Aggregated data with one entry per timestamp
+ */
+export function aggregateInvocationsByTimestamp(data: any[]) {
+  const aggregatedData = data.reduce((acc: Record<string, any>, item: any) => {
+ const timestamp = item.timestamp
+ if (!acc[timestamp]) {
+ acc[timestamp] = { timestamp, count: 0 }
+ }
+ acc[timestamp].count += item.count
+ return acc
+ }, {})
+
+ return Object.values(aggregatedData)
+}
+
+export const edgeFunctionReports = ({
+ projectRef,
+ functions,
+ startDate,
+ endDate,
+ interval,
+ filters,
+}: {
+ projectRef: string
+ functions: { id: string; name: string }[]
+ startDate: string
+ endDate: string
+ interval: AnalyticsInterval
+ filters: {
+ functionIds?: string[]
+ }
+}): ReportConfig[] => [
+ {
+ id: 'total-invocations',
+ label: 'Total Edge Function Invocations',
+ valuePrecision: 0,
+ hide: false,
+ showTooltip: true,
+ showLegend: true,
+ showMaxValue: false,
+ hideChartType: false,
+ defaultChartStyle: 'line',
+ titleTooltip: 'The total number of edge function invocations over time.',
+ availableIn: ['free', 'pro', 'team', 'enterprise'],
+ dataProvider: async () => {
+ const sql = METRIC_SQL.TotalInvocations(interval, filters.functionIds)
+ const response = await runQuery(projectRef, sql, startDate, endDate)
+
+ if (!response?.result) return { data: [] }
+
+ // Transform and aggregate the data using extracted functions
+ const transformedData = transformInvocationData(response.result, functions)
+ const data = aggregateInvocationsByTimestamp(transformedData)
+
+ const attributes = [
+ {
+ attribute: 'count',
+ label: 'Count',
+ },
+ ]
+
+ return { data, attributes, query: sql }
+ },
+ },
+ {
+ id: 'execution-status-codes',
+ label: 'Edge Function Status Codes',
+ valuePrecision: 0,
+ hide: false,
+ showTooltip: true,
+ showLegend: true,
+ showMaxValue: false,
+ hideChartType: false,
+ defaultChartStyle: 'line',
+ titleTooltip: 'The total number of edge function executions by status code.',
+ availableIn: ['free', 'pro', 'team', 'enterprise'],
+ dataProvider: async (
+ projectRef: string,
+ startDate: string,
+ endDate: string,
+ interval: AnalyticsInterval,
+ functionIds?: string[]
+ ) => {
+ const sql = METRIC_SQL.ExecutionStatusCodes(interval, functionIds)
+ const rawData = await runQuery(projectRef, sql, startDate, endDate)
+
+ if (!rawData?.result) return { data: [] }
+
+ /**
+ * The query returns { timestamp, status_code: 500, count: 10 }
+ * and we have to transform it to { timestamp, 500: 10 }
+ * to be able to render the chart.
+ */
+
+ const statusCodes = extractStatusCodesFromData(rawData.result)
+ const attributes = generateStatusCodeAttributes(statusCodes)
+
+ const data = transformStatusCodeData(rawData.result, statusCodes)
+
+ return { data, attributes, query: sql }
+ },
+ },
+ {
+ id: 'execution-time',
+ label: 'Edge Function Execution Time',
+ valuePrecision: 0,
+ hide: false,
+ showTooltip: true,
+ showLegend: true,
+ showMaxValue: false,
+ hideChartType: false,
+ defaultChartStyle: 'line',
+ titleTooltip: 'Average execution time for edge functions.',
+ availableIn: ['free', 'pro', 'team', 'enterprise'],
+ YAxisProps: {
+ width: 50,
+ tickFormatter: (value: number) => `${value}ms`,
+ },
+ format: (value: unknown) => `${Number(value).toFixed(0)}ms`,
+ dataProvider: async () => {
+ const sql = METRIC_SQL.ExecutionTime(interval, filters.functionIds)
+ const rawData = await runQuery(projectRef, sql, startDate, endDate)
+
+ if (!rawData?.result) return { data: [] }
+
+ // Transform the raw data to ensure one data point per timestamp
+ const transformedData = rawData.result?.map((point: any) => ({
+ ...point,
+ timestamp: isUnixMicro(point.timestamp)
+ ? unixMicroToIsoTimestamp(point.timestamp)
+ : dayjs.utc(point.timestamp).toISOString(),
+ function_name: functions.find((f) => f.id === point.function_id)?.name ?? point.function_id,
+ }))
+
+ // If we have multiple function IDs, we need to aggregate the execution times per timestamp
+    const aggregatedData = transformedData.reduce((acc: Record<string, any>, item: any) => {
+ const timestamp = item.timestamp
+ if (!acc[timestamp]) {
+ acc[timestamp] = {
+ timestamp,
+ avg_execution_time: item.avg_execution_time,
+ count: 1,
+ }
+ } else {
+ // Calculate weighted average for multiple functions at the same timestamp
+ const totalTime =
+ acc[timestamp].avg_execution_time * acc[timestamp].count + item.avg_execution_time
+ acc[timestamp].count += 1
+ acc[timestamp].avg_execution_time = totalTime / acc[timestamp].count
+ }
+ return acc
+ }, {})
+
+ const data = Object.values(aggregatedData).map(({ count, ...item }) => item)
+
+ const attributes = [
+ {
+ attribute: 'avg_execution_time',
+ label: 'Avg. execution time (ms)',
+ },
+ ]
+ return { data, attributes, query: sql }
+ },
+ },
+ {
+ id: 'invocations-by-region',
+ label: 'Edge Function Invocations by Region',
+ valuePrecision: 0,
+ hide: false,
+ showTooltip: true,
+ showLegend: true,
+ showMaxValue: false,
+ hideChartType: false,
+ defaultChartStyle: 'line',
+ titleTooltip: 'The total number of edge function invocations by region.',
+ availableIn: ['pro', 'team', 'enterprise'],
+ dataProvider: async () => {
+ const sql = METRIC_SQL.InvocationsByRegion(interval, filters.functionIds)
+ const rawData = await runQuery(projectRef, sql, startDate, endDate)
+ const data = rawData.result?.map((point: any) => ({
+ ...point,
+ timestamp: isUnixMicro(point.timestamp)
+ ? unixMicroToIsoTimestamp(point.timestamp)
+ : dayjs.utc(point.timestamp).toISOString(),
+ }))
+
+ const attributes = [
+ {
+ attribute: 'region',
+ label: 'Region',
+ provider: 'logs',
+ enabled: true,
+ },
+ {
+ attribute: 'count',
+ label: 'Count',
+ provider: 'logs',
+ enabled: true,
+ },
+ ]
+
+ return { data, attributes, query: sql }
+ },
+ },
+]
diff --git a/apps/studio/data/reports/v2/edge-functions.test.tsx b/apps/studio/data/reports/v2/edge-functions.test.tsx
new file mode 100644
index 0000000000000..e1217d7b7a024
--- /dev/null
+++ b/apps/studio/data/reports/v2/edge-functions.test.tsx
@@ -0,0 +1,251 @@
+import { describe, expect, it } from 'vitest'
+import {
+ extractStatusCodesFromData,
+ generateStatusCodeAttributes,
+ transformStatusCodeData,
+ transformInvocationData,
+ aggregateInvocationsByTimestamp,
+} from './edge-functions.config'
+
+describe('extractStatusCodesFromData', () => {
+ it('should extract and sort unique status codes from the data', () => {
+ const data = [
+ { status_code: 200 },
+ { status_code: 500 },
+ { status_code: 200 },
+ { status_code: 404 },
+ ]
+ const result = extractStatusCodesFromData(data)
+ expect(result).toEqual(['200', '404', '500'])
+ })
+
+ it('should handle an empty array', () => {
+ const data: any[] = []
+ const result = extractStatusCodesFromData(data)
+ expect(result).toEqual([])
+ })
+
+ it('should handle data with missing status_code properties', () => {
+ const data = [{ status_code: 200 }, {}, { status_code: 500 }]
+ const result = extractStatusCodesFromData(data)
+ expect(result).toEqual(['200', '500'])
+ })
+
+ it('should handle various data types for status_code', () => {
+ const data = [{ status_code: 200 }, { status_code: '500' }, { status_code: 404 }]
+ const result = extractStatusCodesFromData(data)
+ expect(result).toEqual(['200', '404', '500'])
+ })
+})
+
+describe('generateStatusCodeAttributes', () => {
+ it('should generate the correct attributes for a list of status codes', () => {
+ const statusCodes = ['200', '404', '500']
+ const result = generateStatusCodeAttributes(statusCodes)
+ expect(result.map(({ color, ...rest }) => rest)).toEqual([
+ {
+ attribute: '200',
+ label: '200 OK',
+ },
+ {
+ attribute: '404',
+ label: '404 Not Found',
+ },
+ {
+ attribute: '500',
+ label: '500 Internal Server Error',
+ },
+ ])
+ })
+
+ it('should handle an empty array', () => {
+ const statusCodes: string[] = []
+ const result = generateStatusCodeAttributes(statusCodes)
+ expect(result).toEqual([])
+ })
+})
+
+describe('transformStatusCodeData', () => {
+ it('should pivot the data correctly', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00Z', status_code: 200, count: 10 },
+ { timestamp: '2023-01-01T00:00:00Z', status_code: 500, count: 5 },
+ { timestamp: '2023-01-02T00:00:00Z', status_code: 200, count: 20 },
+ ]
+ const result = transformStatusCodeData(data, ['200', '500'])
+ expect(result).toEqual([
+ { timestamp: '2023-01-01T00:00:00.000Z', '200': 10, '500': 5 },
+ { timestamp: '2023-01-02T00:00:00.000Z', '200': 20, '500': 0 },
+ ])
+ })
+
+ it('should handle an empty array', () => {
+ const data: any[] = []
+ const result = transformStatusCodeData(data, [])
+ expect(result).toEqual([])
+ })
+
+ it('should handle a single entry', () => {
+ const data = [{ timestamp: '2023-01-01T00:00:00Z', status_code: 200, count: 10 }]
+ const result = transformStatusCodeData(data, ['200', '404'])
+ expect(result).toEqual([{ timestamp: '2023-01-01T00:00:00.000Z', '200': 10, '404': 0 }])
+ })
+})
+
+describe('transformInvocationData', () => {
+ const mockFunctions = [
+ { id: 'func1', name: 'Function One' },
+ { id: 'func2', name: 'Function Two' },
+ ]
+
+ it('should transform raw invocation data with function names', () => {
+ const rawData = [
+ {
+ timestamp: '2023-01-01T00:00:00Z',
+ function_id: 'func1',
+ count: 10,
+ },
+ {
+ timestamp: '2023-01-01T01:00:00Z',
+ function_id: 'func2',
+ count: 5,
+ },
+ ]
+
+ const result = transformInvocationData(rawData, mockFunctions)
+ expect(result).toEqual([
+ {
+ timestamp: '2023-01-01T00:00:00.000Z',
+ function_id: 'func1',
+ count: 10,
+ function_name: 'Function One',
+ },
+ {
+ timestamp: '2023-01-01T01:00:00.000Z',
+ function_id: 'func2',
+ count: 5,
+ function_name: 'Function Two',
+ },
+ ])
+ })
+
+ it('should use function_id as fallback when function name not found', () => {
+ const rawData = [
+ {
+ timestamp: '2023-01-01T00:00:00Z',
+ function_id: 'unknown_func',
+ count: 10,
+ },
+ ]
+
+ const result = transformInvocationData(rawData, mockFunctions)
+ expect(result).toEqual([
+ {
+ timestamp: '2023-01-01T00:00:00.000Z',
+ function_id: 'unknown_func',
+ count: 10,
+ function_name: 'unknown_func',
+ },
+ ])
+ })
+
+ it('should handle unix micro timestamps', () => {
+ const rawData = [
+ {
+ timestamp: 1672531200000000, // Unix micro timestamp for 2023-01-01T00:00:00Z
+ function_id: 'func1',
+ count: 10,
+ },
+ ]
+
+ const result = transformInvocationData(rawData, mockFunctions)
+ expect(result[0].timestamp).toBe('2023-01-01T00:00:00.000Z')
+ expect(result[0].function_name).toBe('Function One')
+ })
+
+ it('should handle empty data array', () => {
+ const result = transformInvocationData([], mockFunctions)
+ expect(result).toEqual([])
+ })
+
+ it('should handle empty functions array', () => {
+ const rawData = [
+ {
+ timestamp: '2023-01-01T00:00:00Z',
+ function_id: 'func1',
+ count: 10,
+ },
+ ]
+
+ const result = transformInvocationData(rawData, [])
+ expect(result).toEqual([
+ {
+ timestamp: '2023-01-01T00:00:00.000Z',
+ function_id: 'func1',
+ count: 10,
+ function_name: 'func1',
+ },
+ ])
+ })
+})
+
+describe('aggregateInvocationsByTimestamp', () => {
+ it('should aggregate counts by timestamp', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func1', count: 10 },
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func2', count: 5 },
+ { timestamp: '2023-01-01T01:00:00.000Z', function_id: 'func1', count: 20 },
+ ]
+
+ const result = aggregateInvocationsByTimestamp(data)
+ expect(result).toEqual([
+ { timestamp: '2023-01-01T00:00:00.000Z', count: 15 },
+ { timestamp: '2023-01-01T01:00:00.000Z', count: 20 },
+ ])
+ })
+
+ it('should handle single entry per timestamp', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func1', count: 10 },
+ { timestamp: '2023-01-01T01:00:00.000Z', function_id: 'func2', count: 5 },
+ ]
+
+ const result = aggregateInvocationsByTimestamp(data)
+ expect(result).toEqual([
+ { timestamp: '2023-01-01T00:00:00.000Z', count: 10 },
+ { timestamp: '2023-01-01T01:00:00.000Z', count: 5 },
+ ])
+ })
+
+ it('should handle empty data array', () => {
+ const result = aggregateInvocationsByTimestamp([])
+ expect(result).toEqual([])
+ })
+
+ it('should handle multiple entries with same timestamp and different counts', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func1', count: 1 },
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func2', count: 2 },
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func3', count: 3 },
+ ]
+
+ const result = aggregateInvocationsByTimestamp(data)
+ expect(result).toEqual([{ timestamp: '2023-01-01T00:00:00.000Z', count: 6 }])
+ })
+
+ it('should preserve timestamp order from reduce operation', () => {
+ const data = [
+ { timestamp: '2023-01-01T02:00:00.000Z', function_id: 'func1', count: 30 },
+ { timestamp: '2023-01-01T00:00:00.000Z', function_id: 'func1', count: 10 },
+ { timestamp: '2023-01-01T01:00:00.000Z', function_id: 'func1', count: 20 },
+ ]
+
+ const result = aggregateInvocationsByTimestamp(data)
+ expect(result).toHaveLength(3)
+ expect(result.map((item) => item.timestamp)).toEqual([
+ '2023-01-01T02:00:00.000Z',
+ '2023-01-01T00:00:00.000Z',
+ '2023-01-01T01:00:00.000Z',
+ ])
+ })
+})
diff --git a/apps/studio/data/reports/v2/reports.types.ts b/apps/studio/data/reports/v2/reports.types.ts
new file mode 100644
index 0000000000000..5edd0b2379167
--- /dev/null
+++ b/apps/studio/data/reports/v2/reports.types.ts
@@ -0,0 +1,45 @@
+import { AnalyticsInterval } from 'data/analytics/constants'
+import { YAxisProps } from 'recharts'
+
+type ReportDataProviderFilter = {
+ functionIds?: string[]
+}
+
+export interface ReportDataProvider {
+ (
+ projectRef: string,
+ startDate: string,
+ endDate: string,
+ interval: AnalyticsInterval,
+ functionIds?: string[],
+ edgeFnIdToName?: (id: string) => string | undefined,
+ filters?: ReportDataProviderFilter[]
+ ): Promise<{
+ data: any
+ attributes?: {
+ attribute: string
+ label: string
+ color?: { light: string; dark: string }
+ }[]
+    query?: string // SQL statement used to fetch the data, if any (surfaced for debugging)
+  }> // TODO(jordi): tighten the return type so the keys of `data` are constrained to the declared `attributes`
+}
+
+export interface ReportConfig {
+ id: string
+ label: string
+ dataProvider: ReportDataProvider
+ valuePrecision: number
+ hide: boolean
+ showTooltip: boolean
+ showLegend: boolean
+ showMaxValue: boolean
+ hideChartType: boolean
+ defaultChartStyle: string
+ titleTooltip: string
+ availableIn: string[]
+ format?: (value: unknown) => string
+ YAxisProps?: YAxisProps
+ xAxisKey?: string
+ yAxisKey?: string
+}
diff --git a/apps/studio/hooks/custom-content/CustomContent.types.ts b/apps/studio/hooks/custom-content/CustomContent.types.ts
index 66816dffa963a..28568d054b327 100644
--- a/apps/studio/hooks/custom-content/CustomContent.types.ts
+++ b/apps/studio/hooks/custom-content/CustomContent.types.ts
@@ -1,4 +1,5 @@
-import { ConnectionType } from 'components/interfaces/Connect/Connect.constants'
+import { CONNECTION_TYPES } from 'components/interfaces/Connect/Connect.constants'
+import type { CloudProvider } from 'shared-data'
export type CustomContentTypes = {
organizationLegalDocuments: {
@@ -25,9 +26,7 @@ export type CustomContentTypes = {
*
* These could be helpful in rendering, for e.g an environment file like `.env`
*/
- connectFrameworks: {
- key: string
- label: string
- obj: ConnectionType[]
- }
+ connectFrameworks: (typeof CONNECTION_TYPES)[number]
+
+ infraCloudProviders: CloudProvider[]
}
diff --git a/apps/studio/hooks/custom-content/custom-content.json b/apps/studio/hooks/custom-content/custom-content.json
index 1378a1822a96f..57bd6db86b714 100644
--- a/apps/studio/hooks/custom-content/custom-content.json
+++ b/apps/studio/hooks/custom-content/custom-content.json
@@ -7,5 +7,7 @@
"logs:default_query": null,
- "connect:frameworks": null
+ "connect:frameworks": null,
+
+ "infra:cloud_providers": ["AWS", "AWS_K8S", "FLY"]
}
diff --git a/apps/studio/hooks/custom-content/custom-content.sample.json b/apps/studio/hooks/custom-content/custom-content.sample.json
index 7992201514dc8..da5a880f342ee 100644
--- a/apps/studio/hooks/custom-content/custom-content.sample.json
+++ b/apps/studio/hooks/custom-content/custom-content.sample.json
@@ -72,5 +72,7 @@
]
}
]
- }
+ },
+
+ "infra:cloud_providers": ["AWS_NIMBUS"]
}
diff --git a/apps/studio/hooks/custom-content/custom-content.schema.json b/apps/studio/hooks/custom-content/custom-content.schema.json
index 6d7020a3447ae..adea1a5f9feec 100644
--- a/apps/studio/hooks/custom-content/custom-content.schema.json
+++ b/apps/studio/hooks/custom-content/custom-content.schema.json
@@ -72,13 +72,23 @@
}
}
}
+ },
+
+ "infra:cloud_providers": {
+      "type": "array",
+      "description": "Restricts which cloud providers are selectable in the dashboard",
+ "items": {
+ "type": "string",
+ "enum": ["AWS", "AWS_K8S", "AWS_NIMBUS", "FLY"]
+ }
}
},
"required": [
"organization:legal_documents",
"project_homepage:example_projects",
"logs:default_query",
- "connect:frameworks"
+ "connect:frameworks",
+ "infra:cloud_providers"
],
"additionalProperties": false
}
diff --git a/apps/studio/hooks/custom-content/useCustomContent.ts b/apps/studio/hooks/custom-content/useCustomContent.ts
index 0d1b23426420e..8f26fe1f3971f 100644
--- a/apps/studio/hooks/custom-content/useCustomContent.ts
+++ b/apps/studio/hooks/custom-content/useCustomContent.ts
@@ -28,16 +28,16 @@ const useCustomContent = (
contents: T
): {
[key in CustomContentToCamelCase]:
- | (typeof customContentStaticObj)[CustomContent]
| CustomContentTypes[CustomContentToCamelCase]
+ | null
} => {
// [Joshen] Running into some TS errors without the `as` here - must be overlooking something super simple
return Object.fromEntries(
contents.map((content) => [contentToCamelCase(content), customContentStaticObj[content]])
) as {
[key in CustomContentToCamelCase]:
- | (typeof customContentStaticObj)[CustomContent]
| CustomContentTypes[CustomContentToCamelCase]
+ | null
}
}
diff --git a/apps/studio/hooks/misc/useReportDateRange.ts b/apps/studio/hooks/misc/useReportDateRange.ts
index af9502de12411..bd1be26821ced 100644
--- a/apps/studio/hooks/misc/useReportDateRange.ts
+++ b/apps/studio/hooks/misc/useReportDateRange.ts
@@ -9,6 +9,7 @@ import {
} from 'components/interfaces/Reports/Reports.constants'
import { useCurrentOrgPlan } from 'hooks/misc/useCurrentOrgPlan'
import { maybeShowUpgradePrompt } from 'components/interfaces/Settings/Logs/Logs.utils'
+import { AnalyticsInterval } from 'data/analytics/constants'
export const DATERANGE_LIMITS: { [key: string]: number } = {
free: 1,
@@ -20,7 +21,7 @@ export const DATERANGE_LIMITS: { [key: string]: number } = {
export interface ReportDateRange {
period_start: { date: string; time_period: string }
period_end: { date: string; time_period: string }
- interval: string
+ interval: AnalyticsInterval
}
// Create parsers for individual URL parameters
diff --git a/apps/studio/hooks/useChartData.ts b/apps/studio/hooks/useChartData.ts
index 63998abc939a1..29f129b464a95 100644
--- a/apps/studio/hooks/useChartData.ts
+++ b/apps/studio/hooks/useChartData.ts
@@ -15,7 +15,6 @@ import type { AnalyticsInterval, DataPoint } from 'data/analytics/constants'
import { useAuthLogsReport } from 'data/reports/auth-report-query'
import type { ChartData } from 'components/ui/Charts/Charts.types'
import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils'
-import { useEdgeFunctionReport } from 'data/reports/edgefn-query'
export const useChartData = ({
attributes,
@@ -56,25 +55,9 @@ export const useChartData = ({
enabled: enabled && logsAttributes.length > 0 && !isEdgeFunctionRoute,
})
- const {
- data: edgeFunctionData,
- attributes: edgeFunctionChartAttributes,
- isLoading: isEdgeFunctionLoading,
- } = useEdgeFunctionReport({
- projectRef: ref as string,
- attributes: logsAttributes,
- startDate,
- endDate,
- interval: interval as AnalyticsInterval,
- enabled: enabled && logsAttributes.length > 0 && isEdgeFunctionRoute,
- functionIds,
- })
-
- const logsData = isEdgeFunctionRoute ? edgeFunctionData : authData
- const logsChartAttributes = isEdgeFunctionRoute
- ? edgeFunctionChartAttributes
- : authChartAttributes
- const isLogsLoading = isEdgeFunctionRoute ? isEdgeFunctionLoading : isAuthLoading
+ const logsData = authData
+ const logsChartAttributes = authChartAttributes
+ const isLogsLoading = isAuthLoading
const combinedData = useMemo(() => {
if (data) return data
diff --git a/apps/studio/lib/constants/infrastructure.ts b/apps/studio/lib/constants/infrastructure.ts
index 4d2d4ffbf66a4..d9343193b46cc 100644
--- a/apps/studio/lib/constants/infrastructure.ts
+++ b/apps/studio/lib/constants/infrastructure.ts
@@ -2,6 +2,7 @@ import type { CloudProvider } from 'shared-data'
import { AWS_REGIONS, FLY_REGIONS } from 'shared-data'
import type { components } from 'data/api'
+import { useCustomContent } from 'hooks/custom-content/useCustomContent'
export const AWS_REGIONS_DEFAULT =
process.env.NEXT_PUBLIC_ENVIRONMENT !== 'prod'
@@ -33,11 +34,21 @@ export const PRICING_TIER_PRODUCT_IDS = {
ENTERPRISE: 'tier_enterprise',
}
-export const DEFAULT_PROVIDER: CloudProvider =
- process.env.NEXT_PUBLIC_ENVIRONMENT &&
- ['staging', 'preview'].includes(process.env.NEXT_PUBLIC_ENVIRONMENT)
- ? 'AWS_K8S'
- : 'AWS'
+export function useDefaultProvider() {
+ const defaultProvider: CloudProvider =
+ process.env.NEXT_PUBLIC_ENVIRONMENT &&
+ ['staging', 'preview'].includes(process.env.NEXT_PUBLIC_ENVIRONMENT)
+ ? 'AWS_K8S'
+ : 'AWS'
+
+ const { infraCloudProviders: validCloudProviders } = useCustomContent(['infra:cloud_providers'])
+
+ if (validCloudProviders?.includes(defaultProvider)) {
+ return defaultProvider
+ }
+
+  return (validCloudProviders?.[0] ?? defaultProvider) as CloudProvider
+}
export const PROVIDERS = {
FLY: {
@@ -60,6 +71,12 @@ export const PROVIDERS = {
default_region: AWS_REGIONS_DEFAULT,
regions: { ...AWS_REGIONS },
},
+ AWS_NIMBUS: {
+ id: 'AWS_NIMBUS',
+ name: 'AWS (Nimbus)',
+ default_region: AWS_REGIONS_DEFAULT,
+ regions: { ...AWS_REGIONS },
+ },
} as const
export const PROJECT_STATUS: {
diff --git a/apps/studio/pages/new/[slug].tsx b/apps/studio/pages/new/[slug].tsx
index fc11b85f88a5a..297dc3d709dba 100644
--- a/apps/studio/pages/new/[slug].tsx
+++ b/apps/studio/pages/new/[slug].tsx
@@ -48,6 +48,7 @@ import {
} from 'data/projects/project-create-mutation'
import { useProjectsQuery } from 'data/projects/projects-query'
import { useSendEventMutation } from 'data/telemetry/send-event-mutation'
+import { useCustomContent } from 'hooks/custom-content/useCustomContent'
import { useCheckPermissions } from 'hooks/misc/useCheckPermissions'
import { useIsFeatureEnabled } from 'hooks/misc/useIsFeatureEnabled'
import { useLocalStorageQuery } from 'hooks/misc/useLocalStorage'
@@ -57,11 +58,11 @@ import { getCloudProviderArchitecture } from 'lib/cloudprovider-utils'
import {
AWS_REGIONS_DEFAULT,
DEFAULT_MINIMUM_PASSWORD_STRENGTH,
- DEFAULT_PROVIDER,
FLY_REGIONS_DEFAULT,
MANAGED_BY,
PROJECT_STATUS,
PROVIDERS,
+ useDefaultProvider,
} from 'lib/constants'
import passwordStrength from 'lib/password-strength'
import { generateStrongPassword } from 'lib/project'
@@ -141,6 +142,8 @@ const Wizard: NextPageWithLayout = () => {
const showAdvancedConfig = useIsFeatureEnabled('project_creation:show_advanced_config')
+ const { infraCloudProviders: validCloudProviders } = useCustomContent(['infra:cloud_providers'])
+
// This is to make the database.new redirect work correctly. The database.new redirect should be set to supabase.com/dashboard/new/last-visited-org
if (slug === 'last-visited-org') {
if (lastVisitedOrganization) {
@@ -221,9 +224,11 @@ const Wizard: NextPageWithLayout = () => {
project.organization_id === currentOrg?.id && project.status !== PROJECT_STATUS.INACTIVE
) ?? []
+ const defaultProvider = useDefaultProvider()
+
const { data: _defaultRegion, error: defaultRegionError } = useDefaultRegionQuery(
{
- cloudProvider: PROVIDERS[DEFAULT_PROVIDER].id,
+ cloudProvider: PROVIDERS[defaultProvider].id,
},
{
enabled: !smartRegionEnabled,
@@ -239,7 +244,7 @@ const Wizard: NextPageWithLayout = () => {
useOrganizationAvailableRegionsQuery(
{
slug: slug,
- cloudProvider: PROVIDERS[DEFAULT_PROVIDER].id,
+ cloudProvider: PROVIDERS[defaultProvider].id,
},
{
enabled: smartRegionEnabled,
@@ -304,7 +309,7 @@ const Wizard: NextPageWithLayout = () => {
organization: slug,
projectName: projectName || '',
postgresVersion: '',
- cloudProvider: PROVIDERS[DEFAULT_PROVIDER].id,
+ cloudProvider: PROVIDERS[defaultProvider].id,
dbPass: '',
dbPassStrength: 0,
dbRegion: defaultRegion || undefined,
@@ -455,7 +460,7 @@ const Wizard: NextPageWithLayout = () => {
useEffect(() => {
if (regionError) {
- form.setValue('dbRegion', PROVIDERS[DEFAULT_PROVIDER].default_region.displayName)
+ form.setValue('dbRegion', PROVIDERS[defaultProvider].default_region.displayName)
}
}, [regionError])
@@ -707,15 +712,20 @@ const Wizard: NextPageWithLayout = () => {
- {Object.values(PROVIDERS).map((providerObj) => {
- const label = providerObj['name']
- const value = providerObj['id']
- return (
-
- {label}
-
+ {Object.values(PROVIDERS)
+ .filter(
+ (provider) =>
+ validCloudProviders?.includes(provider.id) ?? true
)
- })}
+ .map((providerObj) => {
+ const label = providerObj['name']
+ const value = providerObj['id']
+ return (
+
+ {label}
+
+ )
+ })}
diff --git a/apps/studio/pages/project/[ref]/reports/auth.tsx b/apps/studio/pages/project/[ref]/reports/auth.tsx
index 3c65f9ca8f2f0..34838356c356e 100644
--- a/apps/studio/pages/project/[ref]/reports/auth.tsx
+++ b/apps/studio/pages/project/[ref]/reports/auth.tsx
@@ -4,7 +4,7 @@ import dayjs from 'dayjs'
import { ArrowRight, RefreshCw } from 'lucide-react'
import { useState } from 'react'
-import ReportChart from 'components/interfaces/Reports/ReportChart'
+import { ReportChart } from 'components/interfaces/Reports/ReportChart'
import ReportHeader from 'components/interfaces/Reports/ReportHeader'
import ReportPadding from 'components/interfaces/Reports/ReportPadding'
import ReportStickyNav from 'components/interfaces/Reports/ReportStickyNav'
diff --git a/apps/studio/pages/project/[ref]/reports/database.tsx b/apps/studio/pages/project/[ref]/reports/database.tsx
index 48ab74b916f04..959967b54aa7e 100644
--- a/apps/studio/pages/project/[ref]/reports/database.tsx
+++ b/apps/studio/pages/project/[ref]/reports/database.tsx
@@ -7,7 +7,7 @@ import { useEffect, useState } from 'react'
import { toast } from 'sonner'
import { useFlag, useParams } from 'common'
-import ReportChart from 'components/interfaces/Reports/ReportChart'
+import { ReportChart } from 'components/interfaces/Reports/ReportChart'
import ReportHeader from 'components/interfaces/Reports/ReportHeader'
import ReportPadding from 'components/interfaces/Reports/ReportPadding'
import { REPORT_DATERANGE_HELPER_LABELS } from 'components/interfaces/Reports/Reports.constants'
@@ -22,7 +22,7 @@ import Table from 'components/to-be-cleaned/Table'
import { ButtonTooltip } from 'components/ui/ButtonTooltip'
import ChartHandler from 'components/ui/Charts/ChartHandler'
import type { MultiAttribute } from 'components/ui/Charts/ComposedChart.utils'
-import ComposedChartHandler from 'components/ui/Charts/ComposedChartHandler'
+import { LazyComposedChartHandler } from 'components/ui/Charts/ComposedChartHandler'
import { ReportSettings } from 'components/ui/Charts/ReportSettings'
import GrafanaPromoBanner from 'components/ui/GrafanaPromoBanner'
import Panel from 'components/ui/Panel'
@@ -269,7 +269,7 @@ const DatabaseUsage = () => {
orgPlan?.id &&
(showChartsV2
? REPORT_ATTRIBUTES_V2.filter((chart) => !chart.hide).map((chart) => (
- {
))
: REPORT_ATTRIBUTES.filter((chart) => !chart.hide).map((chart, i) =>
chart.availableIn?.includes(orgPlan?.id) ? (
- {
{
+const EdgeFunctionsReportV2: NextPageWithLayout = () => {
return (
@@ -32,20 +34,23 @@ const EdgeFunctionsReport: NextPageWithLayout = () => {
)
}
-EdgeFunctionsReport.getLayout = (page) => (
+EdgeFunctionsReportV2.getLayout = (page) => (
{page}
)
-export type UpdateDateRange = (from: string, to: string) => void
-export default EdgeFunctionsReport
+export default EdgeFunctionsReportV2
const EdgeFunctionsUsage = () => {
const { ref } = useParams()
const { data: functions, isLoading: isLoadingFunctions } = useEdgeFunctionsQuery({
projectRef: ref,
})
+
+ const chartSyncId = `edge-functions-${ref}`
+ useChartHoverState(chartSyncId)
+
const [isOpen, setIsOpen] = useState(false)
const [functionIds, setFunctionIds] = useState([])
const [tempFunctionIds, setTempFunctionIds] = useState(functionIds)
@@ -61,8 +66,6 @@ const EdgeFunctionsUsage = () => {
updateDateRange,
datePickerValue,
datePickerHelpers,
- isOrgPlanLoading,
- orgPlan,
showUpgradePrompt,
setShowUpgradePrompt,
handleDatePickerChange,
@@ -71,21 +74,27 @@ const EdgeFunctionsUsage = () => {
const queryClient = useQueryClient()
const [isRefreshing, setIsRefreshing] = useState(false)
- const EDGEFN_CHARTS = getEdgeFunctionReportAttributes()
+ const reportConfig = useMemo(() => {
+ return edgeFunctionReports({
+ projectRef: ref!,
+ functions: functions ?? [],
+ startDate: selectedDateRange?.period_start?.date ?? '',
+ endDate: selectedDateRange?.period_end?.date ?? '',
+ interval: selectedDateRange?.interval ?? 'minute',
+ filters: {
+ functionIds,
+ },
+ })
+ }, [ref, functions, selectedDateRange, functionIds])
const onRefreshReport = async () => {
if (!selectedDateRange) return
setIsRefreshing(true)
- queryClient.invalidateQueries(['edge-function-report', ref])
+ queryClient.invalidateQueries(['report-v2'])
setTimeout(() => setIsRefreshing(false), 1000)
}
- if (!ref) {
- // Prevent rendering charts until the ref is available
- return <>>
- }
-
return (
<>
@@ -101,6 +110,8 @@ const EdgeFunctionsUsage = () => {
tooltip={{ content: { side: 'bottom', text: 'Refresh report' } }}
onClick={onRefreshReport}
/>
+
+
{
>
{selectedDateRange &&
- EDGEFN_CHARTS.filter((attr) => !attr.hide).map((attr, i) => (
-
- ))}
+ reportConfig
+ .filter((report) => !report.hide)
+ .map((report) => (
+
+ ))}
>
diff --git a/apps/studio/pages/project/[ref]/reports/realtime.tsx b/apps/studio/pages/project/[ref]/reports/realtime.tsx
index 27b2c81abce88..14ef2d580bdb7 100644
--- a/apps/studio/pages/project/[ref]/reports/realtime.tsx
+++ b/apps/studio/pages/project/[ref]/reports/realtime.tsx
@@ -19,7 +19,7 @@ import {
TopApiRoutesRenderer,
TotalRequestsChartRenderer,
} from 'components/interfaces/Reports/renderers/ApiRenderers'
-import ComposedChartHandler from 'components/ui/Charts/ComposedChartHandler'
+import { LazyComposedChartHandler } from 'components/ui/Charts/ComposedChartHandler'
import ReportWidget from 'components/interfaces/Reports/ReportWidget'
import ReportFilterBar from 'components/interfaces/Reports/ReportFilterBar'
@@ -187,7 +187,7 @@ const RealtimeUsage = () => {
>
{selectedDateRange &&
REALTIME_REPORT_ATTRIBUTES.filter((chart) => !chart.hide).map((chart) => (
- {
+ test('should return empty array for empty data without min/max', () => {
+ const result = fillTimeseries([], 'timestamp', 'value', 0)
+ expect(result).toEqual([])
+ })
+
+ test('should return empty array for empty data with min/max', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T01:00:00.000Z'
+ const result = fillTimeseries([], 'timestamp', 'value', 0, min, max)
+
+ // When min/max are provided, the function fills the time range with default values
+ // This creates 61 data points (one for each minute from 00:00 to 01:00)
+ expect(result).toHaveLength(61)
+ expect(result[0]).toEqual({ timestamp: '2023-01-01T00:00:00.000Z', value: 0 })
+ expect(result[60]).toEqual({ timestamp: '2023-01-01T01:00:00.000Z', value: 0 })
+ })
+
+ test('should normalize timestamps when data exceeds minPointsToFill', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00.000Z', value: 1 },
+ { timestamp: '2023-01-01T00:01:00.000Z', value: 2 },
+ { timestamp: '2023-01-01T00:02:00.000Z', value: 3 },
+ { timestamp: '2023-01-01T00:03:00.000Z', value: 4 },
+ { timestamp: '2023-01-01T00:04:00.000Z', value: 5 },
+ { timestamp: '2023-01-01T00:05:00.000Z', value: 6 },
+ { timestamp: '2023-01-01T00:06:00.000Z', value: 7 },
+ { timestamp: '2023-01-01T00:07:00.000Z', value: 8 },
+ { timestamp: '2023-01-01T00:08:00.000Z', value: 9 },
+ { timestamp: '2023-01-01T00:09:00.000Z', value: 10 },
+ { timestamp: '2023-01-01T00:10:00.000Z', value: 11 },
+ { timestamp: '2023-01-01T00:11:00.000Z', value: 12 },
+ { timestamp: '2023-01-01T00:12:00.000Z', value: 13 },
+ { timestamp: '2023-01-01T00:13:00.000Z', value: 14 },
+ { timestamp: '2023-01-01T00:14:00.000Z', value: 15 },
+ { timestamp: '2023-01-01T00:15:00.000Z', value: 16 },
+ { timestamp: '2023-01-01T00:16:00.000Z', value: 17 },
+ { timestamp: '2023-01-01T00:17:00.000Z', value: 18 },
+ { timestamp: '2023-01-01T00:18:00.000Z', value: 19 },
+ { timestamp: '2023-01-01T00:19:00.000Z', value: 20 },
+ { timestamp: '2023-01-01T00:20:00.000Z', value: 21 },
+ ]
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, undefined, undefined, 20)
+
+ // Should return normalized data without filling gaps
+ expect(result).toHaveLength(21)
+ result.forEach((item) => {
+ expect(item.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/)
+ })
+ })
+
+ test('should fill gaps in sparse data with 1-minute intervals', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:04:00.000Z'
+ const data = [
+ { timestamp: '2023-01-01T00:01:00.000Z', value: 10 },
+ { timestamp: '2023-01-01T00:03:00.000Z', value: 30 },
+ ]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, min, max, 20, '2m')
+
+ expect(result).toHaveLength(5)
+ const sortedResult = result.sort((a, b) => a.timestamp.localeCompare(b.timestamp))
+
+ expect(sortedResult[0]).toEqual({ timestamp: '2023-01-01T00:00:00.000Z', value: 0 })
+ expect(sortedResult[1]).toEqual({ timestamp: '2023-01-01T00:01:00.000Z', value: 10 })
+ expect(sortedResult[2]).toEqual({ timestamp: '2023-01-01T00:02:00.000Z', value: 0 })
+ expect(sortedResult[3]).toEqual({ timestamp: '2023-01-01T00:03:00.000Z', value: 30 })
+ expect(sortedResult[4]).toEqual({ timestamp: '2023-01-01T00:04:00.000Z', value: 0 })
+ })
+
+ test('should handle multiple value keys', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:02:00.000Z'
+ const data = [{ timestamp: '2023-01-01T00:01:00.000Z', count1: 10, count2: 100 }]
+
+ const result = fillTimeseries(data, 'timestamp', ['count1', 'count2'], 5, min, max, 20, '1m')
+
+ expect(result).toHaveLength(3)
+ const sortedResult = result.sort((a, b) => a.timestamp.localeCompare(b.timestamp))
+
+ expect(sortedResult[0]).toEqual({ timestamp: '2023-01-01T00:00:00.000Z', count1: 5, count2: 5 })
+ expect(sortedResult[1]).toEqual({
+ timestamp: '2023-01-01T00:01:00.000Z',
+ count1: 10,
+ count2: 100,
+ })
+ expect(sortedResult[2]).toEqual({ timestamp: '2023-01-01T00:02:00.000Z', count1: 5, count2: 5 })
+ })
+
+ test('should handle different interval formats', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:10:00.000Z'
+ const data = [{ timestamp: '2023-01-01T00:05:00.000Z', value: 50 }]
+
+ // Test 5-minute intervals: 00:00, 00:05, 00:10 = 3 points
+ const result5m = fillTimeseries(data, 'timestamp', 'value', 0, min, max, 20, '5m')
+ expect(result5m).toHaveLength(3)
+
+ // Test 2-minute intervals: 00:00, 00:02, 00:04, 00:05, 00:06, 00:08, 00:10 = 7 points
+ const result2m = fillTimeseries(data, 'timestamp', 'value', 0, min, max, 20, '2m')
+    // 2-minute grid yields 00:00, 00:02, 00:04, 00:06, 00:08, 00:10;
+    // the pre-existing data point at 00:05 is preserved as-is,
+    // giving 7 entries in total (asserted below).
+    // (removed leftover console.log debug output — keep tests silent)
+ expect(result2m).toHaveLength(7)
+
+ // Test 1-hour intervals: 00:00, 01:00, 02:00 with existing data at 00:05 = 4 points
+ const maxHour = '2023-01-01T02:00:00.000Z'
+ const result1h = fillTimeseries(data, 'timestamp', 'value', 0, min, maxHour, 20, '1h')
+ expect(result1h).toHaveLength(4)
+ })
+
+ test('should handle microsecond timestamps correctly', () => {
+ const now = dayjs.utc('2023-01-01T00:00:00.000Z')
+ const data = [
+ { timestamp: now.valueOf() * 1000, value: 1 },
+ { timestamp: now.add(1, 'minute').valueOf() * 1000, value: 2 },
+ ]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, undefined, undefined, 1)
+
+ expect(result).toHaveLength(2)
+ result.forEach((item) => {
+ expect(item.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/)
+ })
+ })
+
+ test('should handle mixed timestamp formats', () => {
+ const data = [
+ { timestamp: '2023-01-01T00:00:00.000Z', value: 1 },
+ { timestamp: dayjs.utc('2023-01-01T00:01:00.000Z').valueOf() * 1000, value: 2 },
+ { timestamp: '2023-01-01T00:02:00.000Z', value: 3 },
+ ]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, undefined, undefined, 1)
+
+ expect(result).toHaveLength(3)
+ result.forEach((item) => {
+ expect(item.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/)
+ })
+ })
+
+ test('should not fill gaps when data is dense enough', () => {
+ const data = Array.from({ length: 25 }, (_, i) => ({
+ timestamp: dayjs.utc('2023-01-01T00:00:00.000Z').add(i, 'minute').toISOString(),
+ value: i,
+ }))
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, undefined, undefined, 20)
+
+ expect(result).toHaveLength(25)
+ expect(result).toEqual(data)
+ })
+
+ test('should handle edge case with single data point', () => {
+ const data = [{ timestamp: '2023-01-01T00:00:00.000Z', value: 1 }]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0)
+
+ expect(result).toEqual(data)
+ })
+
+ test('should handle edge case with single data point and min/max', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:02:00.000Z'
+ const data = [{ timestamp: '2023-01-01T00:01:00.000Z', value: 1 }]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, min, max, 20, '1m')
+
+ expect(result).toHaveLength(3)
+ const sortedResult = result.sort((a, b) => a.timestamp.localeCompare(b.timestamp))
+
+ expect(sortedResult[0]).toEqual({ timestamp: '2023-01-01T00:00:00.000Z', value: 0 })
+ expect(sortedResult[1]).toEqual({ timestamp: '2023-01-01T00:01:00.000Z', value: 1 })
+ expect(sortedResult[2]).toEqual({ timestamp: '2023-01-01T00:02:00.000Z', value: 0 })
+ })
+
+ test('should handle invalid interval format gracefully', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:02:00.000Z'
+ const data = [{ timestamp: '2023-01-01T00:01:00.000Z', value: 1 }]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0, min, max, 20, 'invalid')
+
+ // Should fall back to default behavior
+ expect(result.length).toBeGreaterThan(1)
+ })
+
+ test('should preserve all properties of original data', () => {
+ const data = [
+ {
+ timestamp: '2023-01-01T00:00:00.000Z',
+ value: 1,
+ extra: 'data',
+ nested: { prop: 'value' },
+ },
+ ]
+
+ const result = fillTimeseries(data, 'timestamp', 'value', 0)
+
+ expect(result[0]).toEqual(data[0])
+ expect(result[0].extra).toBe('data')
+ expect(result[0].nested).toEqual({ prop: 'value' })
+ })
+
+ test('should handle empty value keys array', () => {
+ const min = '2023-01-01T00:00:00.000Z'
+ const max = '2023-01-01T00:01:00.000Z'
+ const data = [{ timestamp: '2023-01-01T00:00:30.000Z', value: 1 }]
+
+ const result = fillTimeseries(data, 'timestamp', [], 0, min, max, 20, '30s')
+
+ expect(result).toHaveLength(3)
+ result.forEach((item) => {
+ expect(item).toHaveProperty('timestamp')
+ expect(item).not.toHaveProperty('value')
+ })
+ })
+})
describe('checkForWithClause', () => {
test('basic queries', () => {
diff --git a/apps/studio/tests/vitestSetup.ts b/apps/studio/tests/vitestSetup.ts
index f309589b528ca..df9816a4bbae6 100644
--- a/apps/studio/tests/vitestSetup.ts
+++ b/apps/studio/tests/vitestSetup.ts
@@ -4,6 +4,14 @@ import { createDynamicRouteParser } from 'next-router-mock/dist/dynamic-routes'
import { afterAll, afterEach, beforeAll, vi } from 'vitest'
import { routerMock } from './lib/route-mock'
import { mswServer } from './lib/msw'
+import dayjs from 'dayjs'
+import utc from 'dayjs/plugin/utc'
+import timezone from 'dayjs/plugin/timezone'
+import relativeTime from 'dayjs/plugin/relativeTime'
+
+dayjs.extend(utc)
+dayjs.extend(timezone)
+dayjs.extend(relativeTime)
// Uncomment this if HTML in errors are being annoying.
//
@@ -38,7 +46,7 @@ beforeAll(() => {
vi.mock('next/compat/router', () => require('next-router-mock'))
// Mock the useParams hook from common module globally
- vi.mock('common', async (importOriginal) => {
+ vi.mock('common', async (importOriginal: any) => {
const actual = await importOriginal()
return {
...(typeof actual === 'object' ? actual : {}),
diff --git a/apps/www/components/Hero/Hero.tsx b/apps/www/components/Hero/Hero.tsx
index 7146986b5599a..94d27bf058280 100644
--- a/apps/www/components/Hero/Hero.tsx
+++ b/apps/www/components/Hero/Hero.tsx
@@ -3,8 +3,6 @@ import Link from 'next/link'
import { Button } from 'ui'
import SectionContainer from '~/components/Layouts/SectionContainer'
import { useSendTelemetryEvent } from '~/lib/telemetry'
-import AnnouncementBadge from '../Announcement/Badge'
-import { announcement } from 'ui-patterns'
const Hero = () => {
const sendTelemetryEvent = useSendTelemetryEvent()
diff --git a/apps/www/components/Nav/index.tsx b/apps/www/components/Nav/index.tsx
index 8746eb592b94b..c9232b6bc052b 100644
--- a/apps/www/components/Nav/index.tsx
+++ b/apps/www/components/Nav/index.tsx
@@ -9,10 +9,10 @@ import { useIsLoggedIn, useUser } from 'common'
import { Button, buttonVariants, cn } from 'ui'
import { AuthenticatedDropdownMenu } from 'ui-patterns'
+import { useSendTelemetryEvent } from 'lib/telemetry'
import GitHubButton from './GitHubButton'
import HamburgerButton from './HamburgerMenu'
import RightClickBrandLogo from './RightClickBrandLogo'
-import { useSendTelemetryEvent } from 'lib/telemetry'
import useDropdownMenu from './useDropdownMenu'
import { getMenu } from 'data/nav'
diff --git a/apps/www/pages/_app.tsx b/apps/www/pages/_app.tsx
index f033817bc8a9f..be7a9e4042e7e 100644
--- a/apps/www/pages/_app.tsx
+++ b/apps/www/pages/_app.tsx
@@ -5,11 +5,12 @@ import '../styles/index.css'
import {
AuthProvider,
FeatureFlagProvider,
+ getFlags as getConfigCatFlags,
IS_PLATFORM,
PageTelemetry,
+ TelemetryTagManager,
ThemeProvider,
useThemeSandbox,
- TelemetryTagManager,
} from 'common'
import { DefaultSeo } from 'next-seo'
import { AppProps } from 'next/app'
@@ -86,7 +87,7 @@ export default function App({ Component, pageProps }: AppProps) {
/>
-
+
theme.value)}
enableSystem
diff --git a/packages/common/configcat.ts b/packages/common/configcat.ts
index 534a3b0a77dae..89046050d734d 100644
--- a/packages/common/configcat.ts
+++ b/packages/common/configcat.ts
@@ -3,6 +3,17 @@ import * as configcat from 'configcat-js'
let client: configcat.IConfigCatClient
const endpoint = '/configuration-files/configcat-proxy/frontend-v2/config_v6.json'
+/**
+ * To set up ConfigCat for another app
+ * - Declare `FeatureFlagProvider` at the _app level
+ * - Pass in `getFlags` as `getConfigCatFlags` into `FeatureFlagProvider`
+ * - [Joshen] Wondering if this should just be baked into FeatureFlagProvider, rather than passed as a prop
+ * - Ensure that your app has the `NEXT_PUBLIC_CONFIGCAT_PROXY_URL` env var
+ * - [Joshen] Wondering if we can just set a default value for each env var, so can skip setting up env var in Vercel
+ * - Verify that your flags are now loading by console logging `flagValues` in `FeatureFlagProvider`'s useEffect
+ * - Can now use ConfigCat feature flags with the `useFlag` hook
+ */
+
export const fetchHandler: typeof fetch = async (input, init) => {
try {
return await fetch(input, init)
@@ -16,10 +27,14 @@ export const fetchHandler: typeof fetch = async (input, init) => {
}
async function getClient() {
- if (client) {
- return client
+ if (!process.env.NEXT_PUBLIC_CONFIGCAT_PROXY_URL) {
+ console.error(
+ 'Failed to get ConfigCat client: missing env var "NEXT_PUBLIC_CONFIGCAT_PROXY_URL"'
+ )
}
+ if (client) return client
+
const response = await fetchHandler(process.env.NEXT_PUBLIC_CONFIGCAT_PROXY_URL + endpoint)
const options = { pollIntervalSeconds: 7 * 60 } // 7 minutes
if (response.status !== 200) {
@@ -40,10 +55,11 @@ async function getClient() {
}
export async function getFlags(userEmail: string = '') {
+ const client = await getClient()
+
if (userEmail) {
- const client = await getClient()
return client.getAllValuesAsync(new configcat.User(userEmail))
+ } else {
+ return client.getAllValuesAsync()
}
-
- return []
}
diff --git a/packages/shared-data/regions.ts b/packages/shared-data/regions.ts
index 27caf5b26597d..d38d8db339dd6 100644
--- a/packages/shared-data/regions.ts
+++ b/packages/shared-data/regions.ts
@@ -1,4 +1,4 @@
-export type CloudProvider = 'FLY' | 'AWS' | 'AWS_K8S'
+export type CloudProvider = 'FLY' | 'AWS' | 'AWS_K8S' | 'AWS_NIMBUS'
export type Region = typeof AWS_REGIONS | typeof FLY_REGIONS
export const AWS_REGIONS = {