diff --git a/apps/docs/app/api/graphql/route.ts b/apps/docs/app/api/graphql/route.ts index f7ecee53de6d9..2b7020fabc16c 100644 --- a/apps/docs/app/api/graphql/route.ts +++ b/apps/docs/app/api/graphql/route.ts @@ -37,6 +37,31 @@ export const preferredRegion = [ const MAX_DEPTH = 5 +function isAllowedCorsOrigin(origin: string): boolean { + const exactMatches = IS_DEV + ? ['http://localhost:8082', 'https://supabase.com'] + : ['https://supabase.com'] + if (exactMatches.includes(origin)) { + return true + } + + return /^https:\/\/[\w-]+\w-supabase.vercel.app$/.test(origin) +} + +function getCorsHeaders(request: Request): Record { + const origin = request.headers.get('Origin') + + if (origin && isAllowedCorsOrigin(origin)) { + return { + 'Access-Control-Allow-Origin': origin, + 'Access-Control-Allow-Methods': 'POST, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type, Accept', + } + } + + return {} +} + const validationRules = [ ...specifiedRules, createQueryDepthLimiter(MAX_DEPTH), @@ -78,13 +103,18 @@ async function handleGraphQLRequest(request: Request): Promise { const { query, variables, operationName } = parsedBody.data const validationErrors = validateGraphQLRequest(query, isDevGraphiQL(request)) if (validationErrors.length > 0) { - return NextResponse.json({ - errors: validationErrors.map((error) => ({ - message: error.message, - locations: error.locations, - path: error.path, - })), - }) + return NextResponse.json( + { + errors: validationErrors.map((error) => ({ + message: error.message, + locations: error.locations, + path: error.path, + })), + }, + { + headers: getCorsHeaders(request), + } + ) } const result = await graphql({ @@ -94,7 +124,9 @@ async function handleGraphQLRequest(request: Request): Promise { variableValues: variables, operationName, }) - return NextResponse.json(result) + return NextResponse.json(result, { + headers: getCorsHeaders(request), + }) } function validateGraphQLRequest(query: string, isDevGraphiQL = false): ReadonlyArray { @@ 
-112,6 +144,14 @@ function validateGraphQLRequest(query: string, isDevGraphiQL = false): ReadonlyA return validate(rootGraphQLSchema, documentAST, rules) } +export async function OPTIONS(request: Request): Promise { + const corsHeaders = getCorsHeaders(request) + return new NextResponse(null, { + status: 204, + headers: corsHeaders, + }) +} + export async function POST(request: Request): Promise { try { const result = await handleGraphQLRequest(request) @@ -130,18 +170,28 @@ export async function POST(request: Request): Promise { // https://github.com/getsentry/sentry-javascript/issues/9626 await Sentry.flush(2000) - return NextResponse.json({ - errors: [{ message: error.isPrivate() ? 'Internal Server Error' : error.message }], - }) + return NextResponse.json( + { + errors: [{ message: error.isPrivate() ? 'Internal Server Error' : error.message }], + }, + { + headers: getCorsHeaders(request), + } + ) } else { Sentry.captureException(error) // Do not let Vercel close the process until Sentry has flushed // https://github.com/getsentry/sentry-javascript/issues/9626 await Sentry.flush(2000) - return NextResponse.json({ - errors: [{ message: 'Internal Server Error' }], - }) + return NextResponse.json( + { + errors: [{ message: 'Internal Server Error' }], + }, + { + headers: getCorsHeaders(request), + } + ) } } } diff --git a/apps/docs/content/guides/getting-started/architecture.mdx b/apps/docs/content/guides/getting-started/architecture.mdx index 970431df96ad3..559bb711276fe 100644 --- a/apps/docs/content/guides/getting-started/architecture.mdx +++ b/apps/docs/content/guides/getting-started/architecture.mdx @@ -29,7 +29,7 @@ Each Supabase project consists of several tools: ### Postgres (database) -Postgres is the core of Supabase. We do not abstract the Postgres database—you can access it and use it with full privileges. We provide tools which makes Postgres as easy to use as Firebase. +Postgres is the core of Supabase. 
We do not abstract the Postgres database—you can access it and use it with full privileges. We provide tools which make Postgres as easy to use as Firebase. - Official Docs: [postgresql.org/docs](https://www.postgresql.org/docs/current/index.html) - Source code: [github.com/postgres/postgres](https://github.com/postgres/postgres) (mirror) diff --git a/apps/docs/content/guides/getting-started/features.mdx b/apps/docs/content/guides/getting-started/features.mdx index 47d4cabb9b769..72e0269556e4a 100644 --- a/apps/docs/content/guides/getting-started/features.mdx +++ b/apps/docs/content/guides/getting-started/features.mdx @@ -64,7 +64,7 @@ Deploy read-only databases across multiple regions, for lower latency and better ### Log drains -Export Supabase logs at to 3rd party providers and external tooling. [Docs](/docs/guides/platform/log-drains). +Export Supabase logs to 3rd party providers and external tooling. [Docs](/docs/guides/platform/log-drains). ## Studio diff --git a/apps/studio/components/interfaces/Settings/Logs/ErrorCodeDialog.tsx b/apps/studio/components/interfaces/Settings/Logs/ErrorCodeDialog.tsx new file mode 100644 index 0000000000000..a14b40e82e6a4 --- /dev/null +++ b/apps/studio/components/interfaces/Settings/Logs/ErrorCodeDialog.tsx @@ -0,0 +1,111 @@ +import { + Alert_Shadcn_, + AlertDescription_Shadcn_, + AlertTitle_Shadcn_, + Badge, + Button_Shadcn_, + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from 'ui' +import { useErrorCodesQuery } from 'data/content-api/docs-error-codes-query' +import { type ErrorCodeQueryQuery, Service } from 'data/graphql/graphql' +import { AlertTriangle } from 'lucide-react' +import ShimmeringLoader from 'ui-patterns/ShimmeringLoader' + +interface ErrorCodeDialogProps { + open: boolean + onOpenChange: (open: boolean) => void + errorCode: string + service?: Service +} + +export const ErrorCodeDialog = ({ + open, + onOpenChange, + errorCode, + service, +}: ErrorCodeDialogProps) => { + const { 
data, isLoading, isSuccess, refetch } = useErrorCodesQuery( + { code: errorCode, service }, + { enabled: open } + ) + + return ( + + + + + Help for error code {errorCode} + + + {isLoading && } + {isSuccess && } + {!isLoading && !isSuccess && } + + + + + ) +} + +const LoadingState = () => ( + <> + + + +) + +const SuccessState = ({ data }: { data: ErrorCodeQueryQuery | undefined }) => { + const errors = data?.errors?.nodes?.filter((error) => !!error.message) + if (!errors || errors.length === 0) { + return <>No information found for this error code. + } + + return ( + <> +

Possible explanations for this error:

+
+ {errors.map((error) => ( + + ))} +
+ + ) +} + +const ErrorExplanation = ({ + code, + service, + message, +}: { + code: string + service: Service + message?: string | null +}) => { + if (!message) return null + + return ( + <> + {service} +

{message}

+ + ) +} + +const ErrorState = ({ refetch }: { refetch?: () => void }) => ( + + + Lookup failed + +

Failed to look up error code help info

+ {refetch && ( + + Try again + + )} +
+
+) diff --git a/apps/studio/components/interfaces/Settings/Logs/LogSelectionRenderers/DefaultPreviewSelectionRenderer.tsx b/apps/studio/components/interfaces/Settings/Logs/LogSelectionRenderers/DefaultPreviewSelectionRenderer.tsx index e741d74fe436d..fd4de31277c57 100644 --- a/apps/studio/components/interfaces/Settings/Logs/LogSelectionRenderers/DefaultPreviewSelectionRenderer.tsx +++ b/apps/studio/components/interfaces/Settings/Logs/LogSelectionRenderers/DefaultPreviewSelectionRenderer.tsx @@ -1,3 +1,4 @@ +import { Service } from 'data/graphql/graphql' import { useLogsUrlState } from 'hooks/analytics/useLogsUrlState' import { useEffect, useState } from 'react' import { toast } from 'sonner' @@ -12,6 +13,7 @@ import { Separator, } from 'ui' import { TimestampInfo } from 'ui-patterns' +import { ErrorCodeDialog } from '../ErrorCodeDialog' import type { LogSearchCallback, PreviewLogData } from '../Logs.types' import { ResponseCodeFormatter } from '../LogsFormatters' @@ -32,12 +34,18 @@ const PropertyRow = ({ keyName, value, dataTestId, + path, }: { keyName: string value: any dataTestId?: string + path?: string }) => { const { setSearch } = useLogsUrlState() + const [showErrorInfo, setShowErrorInfo] = useState(false) + + const service = path?.startsWith('/auth/') ? Service.Auth : undefined + const handleSearch: LogSearchCallback = async (event: string, { query }: { query?: string }) => { setSearch(query || '') } @@ -118,88 +126,108 @@ const PropertyRow = ({ } return ( - - -
-
-

- {keyName} -

+ <> + + +
- {isExpanded ? ( - - ) : isTimestamp ? ( - - ) : isStatus ? ( -
- -
- ) : isMethod ? ( -
- -
- ) : ( -
{value}
- )} +

+ {keyName} +

+
+ {isExpanded ? ( + + ) : isTimestamp ? ( + + ) : isStatus ? ( +
+ +
+ ) : isMethod ? ( +
+ +
+ ) : ( +
{value}
+ )} +
-
- - - Copy {keyName} - {!isObject && ( - { - setIsExpanded(!isExpanded) - }} - > - {isExpanded ? 'Collapse' : 'Expand'} value - - )} - {(isMethod || isUserAgent || isStatus || isPath) && ( - { - handleSearch('search-input-change', { query: value }) - }} - > - Search by {keyName} - - )} - {isSearch - ? getSearchPairs().map((pair) => ( - { - handleSearch('search-input-change', { query: pair }) - }} - > - Search by {pair} - - )) - : null} - - - + + + {keyName === 'error_code' && ( + { + setShowErrorInfo(true) + }} + > + More information + + )} + Copy {keyName} + {!isObject && ( + { + setIsExpanded(!isExpanded) + }} + > + {isExpanded ? 'Collapse' : 'Expand'} value + + )} + {(isMethod || isUserAgent || isStatus || isPath) && ( + { + handleSearch('search-input-change', { query: value }) + }} + > + Search by {keyName} + + )} + {isSearch + ? getSearchPairs().map((pair) => ( + { + handleSearch('search-input-change', { query: pair }) + }} + > + Search by {pair} + + )) + : null} + + + + {keyName === 'error_code' && ( + + )} + ) } const DefaultPreviewSelectionRenderer = ({ log }: { log: PreviewLogData }) => { const { timestamp, event_message, metadata, id, status, ...rest } = log + const path = typeof log.path === 'string' ? 
log.path : undefined const log_file = log?.metadata?.[0]?.log_file return ( @@ -212,7 +240,7 @@ const DefaultPreviewSelectionRenderer = ({ log }: { log: PreviewLogData }) => { )} {Object.entries(rest).map(([key, value]) => { - return + return })} {log?.event_message && ( diff --git a/apps/studio/data/content-api/docs-error-codes-query.ts b/apps/studio/data/content-api/docs-error-codes-query.ts new file mode 100644 index 0000000000000..381c655222bb8 --- /dev/null +++ b/apps/studio/data/content-api/docs-error-codes-query.ts @@ -0,0 +1,43 @@ +import { useQuery, type UseQueryOptions } from '@tanstack/react-query' +import { graphql } from 'data/graphql' +import { executeGraphQL } from 'data/graphql/execute' +import { Service } from 'data/graphql/graphql' +import { contentApiKeys } from './keys' + +const ErrorCodeQuery = graphql(` + query ErrorCodeQuery($code: String!, $service: Service) { + errors(code: $code, service: $service) { + nodes { + code + service + message + } + } + } +`) + +interface Variables { + code: string + service?: Service +} + +async function getErrorCodeDescriptions({ code, service }: Variables, signal?: AbortSignal) { + return await executeGraphQL(ErrorCodeQuery, { variables: { code, service }, signal }) +} + +type ErrorCodeDescriptionsData = Awaited> +type ErrorCodeDescriptionsError = unknown + +export const useErrorCodesQuery = ( + variables: Variables, + { + enabled = true, + ...options + }: UseQueryOptions = {} +) => { + return useQuery( + contentApiKeys.errorCodes(variables), + ({ signal }) => getErrorCodeDescriptions(variables, signal), + { enabled, ...options } + ) +} diff --git a/apps/studio/data/content-api/keys.ts b/apps/studio/data/content-api/keys.ts new file mode 100644 index 0000000000000..80d437700a099 --- /dev/null +++ b/apps/studio/data/content-api/keys.ts @@ -0,0 +1,4 @@ +export const contentApiKeys = { + errorCodes: ({ code, service }: { code: string; service?: string }) => + ['content-api', 'error-codes', { code, service }] as 
const, +} diff --git a/apps/studio/data/graphql/execute.ts b/apps/studio/data/graphql/execute.ts new file mode 100644 index 0000000000000..3305e1ba5d607 --- /dev/null +++ b/apps/studio/data/graphql/execute.ts @@ -0,0 +1,36 @@ +import { handleError } from 'data/fetchers' +import type { TypedDocumentString } from './graphql' + +const CONTENT_API_URL = process.env.NEXT_PUBLIC_CONTENT_API_URL! + +export async function executeGraphQL( + query: TypedDocumentString, + { variables, signal }: { variables?: TVariables; signal?: AbortSignal } +) { + try { + const response = await fetch(CONTENT_API_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + query, + variables, + }), + signal, + }) + + if (!response.ok) { + throw new Error('Failed network response from Content API') + } + + const { data, errors } = await response.json() + if (errors) { + throw errors + } + + return data as TResult + } catch (err) { + handleError(err) + } +} diff --git a/apps/studio/data/graphql/fragment-masking.ts b/apps/studio/data/graphql/fragment-masking.ts new file mode 100644 index 0000000000000..494960811faac --- /dev/null +++ b/apps/studio/data/graphql/fragment-masking.ts @@ -0,0 +1,84 @@ +/* eslint-disable */ +import { ResultOf, DocumentTypeDecoration } from '@graphql-typed-document-node/core' +import { Incremental, TypedDocumentString } from './graphql' + +export type FragmentType> = + TDocumentType extends DocumentTypeDecoration + ? [TType] extends [{ ' $fragmentName'?: infer TKey }] + ? TKey extends string + ? 
{ ' $fragmentRefs'?: { [key in TKey]: TType } } + : never + : never + : never + +// return non-nullable if `fragmentType` is non-nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: FragmentType> +): TType +// return nullable if `fragmentType` is undefined +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: FragmentType> | undefined +): TType | undefined +// return nullable if `fragmentType` is nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: FragmentType> | null +): TType | null +// return nullable if `fragmentType` is nullable or undefined +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: FragmentType> | null | undefined +): TType | null | undefined +// return array of non-nullable if `fragmentType` is array of non-nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: Array>> +): Array +// return array of nullable if `fragmentType` is array of nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: Array>> | null | undefined +): Array | null | undefined +// return readonly array of non-nullable if `fragmentType` is array of non-nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: ReadonlyArray>> +): ReadonlyArray +// return readonly array of nullable if `fragmentType` is array of nullable +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: ReadonlyArray>> | null | undefined +): ReadonlyArray | null | undefined +export function useFragment( + _documentNode: DocumentTypeDecoration, + fragmentType: + | FragmentType> + | Array>> + | ReadonlyArray>> + | null + | undefined +): TType | Array | ReadonlyArray | null | undefined { + return fragmentType as any +} + +export function makeFragmentData< + F extends DocumentTypeDecoration, + FT extends ResultOf, 
+>(data: FT, _fragment: F): FragmentType { + return data as FragmentType +} +export function isFragmentReady( + queryNode: TypedDocumentString, + fragmentNode: TypedDocumentString, + data: FragmentType, any>> | null | undefined +): data is FragmentType { + const deferredFields = queryNode.__meta__?.deferredFields as Record + const fragName = fragmentNode.__meta__?.fragmentName as string | undefined + + if (!deferredFields || !fragName) return true + + const fields = deferredFields[fragName] ?? [] + return fields.length > 0 && fields.every((field) => data && field in data) +} diff --git a/apps/studio/data/graphql/gql.ts b/apps/studio/data/graphql/gql.ts new file mode 100644 index 0000000000000..1f4ff76f6e6ff --- /dev/null +++ b/apps/studio/data/graphql/gql.ts @@ -0,0 +1,32 @@ +/* eslint-disable */ +import * as types from './graphql' + +/** + * Map of all GraphQL operations in the project. + * + * This map has several performance disadvantages: + * 1. It is not tree-shakeable, so it will include all operations in the project. + * 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle. + * 3. It does not support dead code elimination, so it will add unused operations. + * + * Therefore it is highly recommended to use the babel or swc plugin for production. 
+ * Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size + */ +type Documents = { + '\n query ErrorCodeQuery($code: String!, $service: Service) {\n errors(code: $code, service: $service) {\n nodes {\n code\n service\n message\n }\n }\n }\n': typeof types.ErrorCodeQueryDocument +} +const documents: Documents = { + '\n query ErrorCodeQuery($code: String!, $service: Service) {\n errors(code: $code, service: $service) {\n nodes {\n code\n service\n message\n }\n }\n }\n': + types.ErrorCodeQueryDocument, +} + +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: '\n query ErrorCodeQuery($code: String!, $service: Service) {\n errors(code: $code, service: $service) {\n nodes {\n code\n service\n message\n }\n }\n }\n' +): typeof import('./graphql').ErrorCodeQueryDocument + +export function graphql(source: string) { + return (documents as any)[source] ?? {} +} diff --git a/apps/studio/data/graphql/graphql.ts b/apps/studio/data/graphql/graphql.ts new file mode 100644 index 0000000000000..99d32bef6b994 --- /dev/null +++ b/apps/studio/data/graphql/graphql.ts @@ -0,0 +1,266 @@ +/* eslint-disable */ +import { DocumentTypeDecoration } from '@graphql-typed-document-node/core' +export type Maybe = T | null +export type InputMaybe = Maybe +export type Exact = { [K in keyof T]: T[K] } +export type MakeOptional = Omit & { [SubKey in K]?: Maybe } +export type MakeMaybe = Omit & { [SubKey in K]: Maybe } +export type MakeEmpty = { + [_ in K]?: never +} +export type Incremental = + | T + | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? 
T[P] : never } +/** All built-in and custom scalars, mapped to their actual values */ +export type Scalars = { + ID: { input: string; output: string } + String: { input: string; output: string } + Boolean: { input: boolean; output: boolean } + Int: { input: number; output: number } + Float: { input: number; output: number } +} + +/** A reference document containing a description of a Supabase CLI command */ +export type CliCommandReference = SearchResult & { + __typename?: 'CLICommandReference' + /** The content of the reference document, as text */ + content?: Maybe + /** The URL of the document */ + href?: Maybe + /** The title of the document */ + title?: Maybe +} + +/** A reference document containing a description of a function from a Supabase client library */ +export type ClientLibraryFunctionReference = SearchResult & { + __typename?: 'ClientLibraryFunctionReference' + /** The content of the reference document, as text */ + content?: Maybe + /** The URL of the document */ + href?: Maybe + /** The programming language for which the function is written */ + language: Language + /** The name of the function or method */ + methodName?: Maybe + /** The title of the document */ + title?: Maybe +} + +/** An error returned by a Supabase service */ +export type Error = { + __typename?: 'Error' + /** The unique code identifying the error. The code is stable, and can be used for string matching during error handling. */ + code: Scalars['String']['output'] + /** The HTTP status code returned with this error. */ + httpStatusCode?: Maybe + /** A human-readable message describing the error. The message is not stable, and should not be used for string matching during error handling. Use the code instead. */ + message?: Maybe + /** The Supabase service that returns this error. 
*/ + service: Service +} + +/** A collection of Errors */ +export type ErrorCollection = { + __typename?: 'ErrorCollection' + /** A list of edges containing nodes in this collection */ + edges: Array + /** The nodes in this collection, directly accessible */ + nodes: Array + /** Pagination information */ + pageInfo: PageInfo + /** The total count of items available in this collection */ + totalCount: Scalars['Int']['output'] +} + +/** An edge in a collection of Errors */ +export type ErrorEdge = { + __typename?: 'ErrorEdge' + /** A cursor for use in pagination */ + cursor: Scalars['String']['output'] + /** The Error at the end of the edge */ + node: Error +} + +/** A document containing content from the Supabase docs. This is a guide, which might describe a concept, or explain the steps for using or implementing a feature. */ +export type Guide = SearchResult & { + __typename?: 'Guide' + /** The full content of the document, including all subsections (both those matching and not matching any query string) and possibly more content */ + content?: Maybe + /** The URL of the document */ + href?: Maybe + /** The subsections of the document. If the document is returned from a search match, only matching content chunks are returned. For the full content of the original document, use the content field in the parent Guide. 
*/ + subsections?: Maybe + /** The title of the document */ + title?: Maybe +} + +export enum Language { + Csharp = 'CSHARP', + Dart = 'DART', + Javascript = 'JAVASCRIPT', + Kotlin = 'KOTLIN', + Python = 'PYTHON', + Swift = 'SWIFT', +} + +/** Pagination information for a collection */ +export type PageInfo = { + __typename?: 'PageInfo' + /** Cursor pointing to the end of the current page */ + endCursor?: Maybe + /** Whether there are more items after the current page */ + hasNextPage: Scalars['Boolean']['output'] + /** Whether there are more items before the current page */ + hasPreviousPage: Scalars['Boolean']['output'] + /** Cursor pointing to the start of the current page */ + startCursor?: Maybe +} + +export type RootQueryType = { + __typename?: 'RootQueryType' + /** Get the details of an error code returned from a Supabase service */ + error?: Maybe + /** Get error codes that can potentially be returned by Supabase services */ + errors?: Maybe + /** Get the GraphQL schema for this endpoint */ + schema: Scalars['String']['output'] + /** Search the Supabase docs for content matching a query string */ + searchDocs?: Maybe +} + +export type RootQueryTypeErrorArgs = { + code: Scalars['String']['input'] + service: Service +} + +export type RootQueryTypeErrorsArgs = { + after?: InputMaybe + before?: InputMaybe + code?: InputMaybe + first?: InputMaybe + last?: InputMaybe + service?: InputMaybe +} + +export type RootQueryTypeSearchDocsArgs = { + limit?: InputMaybe + query: Scalars['String']['input'] +} + +/** Document that matches a search query */ +export type SearchResult = { + /** The full content of the matching result */ + content?: Maybe + /** The URL of the matching result */ + href?: Maybe + /** The title of the matching result */ + title?: Maybe +} + +/** A collection of search results containing content from Supabase docs */ +export type SearchResultCollection = { + __typename?: 'SearchResultCollection' + /** A list of edges containing nodes in this 
collection */ + edges: Array + /** The nodes in this collection, directly accessible */ + nodes: Array + /** The total count of items available in this collection */ + totalCount: Scalars['Int']['output'] +} + +/** An edge in a collection of SearchResults */ +export type SearchResultEdge = { + __typename?: 'SearchResultEdge' + /** The SearchResult at the end of the edge */ + node: SearchResult +} + +export enum Service { + Auth = 'AUTH', + Realtime = 'REALTIME', + Storage = 'STORAGE', +} + +/** A content chunk taken from a larger document in the Supabase docs */ +export type Subsection = { + __typename?: 'Subsection' + /** The content of the subsection */ + content?: Maybe + /** The URL of the subsection */ + href?: Maybe + /** The title of the subsection */ + title?: Maybe +} + +/** A collection of content chunks from a larger document in the Supabase docs. */ +export type SubsectionCollection = { + __typename?: 'SubsectionCollection' + /** A list of edges containing nodes in this collection */ + edges: Array + /** The nodes in this collection, directly accessible */ + nodes: Array + /** The total count of items available in this collection */ + totalCount: Scalars['Int']['output'] +} + +/** An edge in a collection of Subsections */ +export type SubsectionEdge = { + __typename?: 'SubsectionEdge' + /** The Subsection at the end of the edge */ + node: Subsection +} + +/** A document describing how to troubleshoot an issue when using Supabase */ +export type TroubleshootingGuide = SearchResult & { + __typename?: 'TroubleshootingGuide' + /** The full content of the troubleshooting guide */ + content?: Maybe + /** The URL of the troubleshooting guide */ + href?: Maybe + /** The title of the troubleshooting guide */ + title?: Maybe +} + +export type ErrorCodeQueryQueryVariables = Exact<{ + code: Scalars['String']['input'] + service?: InputMaybe +}> + +export type ErrorCodeQueryQuery = { + __typename?: 'RootQueryType' + errors?: { + __typename?: 'ErrorCollection' + 
nodes: Array<{ __typename?: 'Error'; code: string; service: Service; message?: string | null }> + } | null +} + +export class TypedDocumentString + extends String + implements DocumentTypeDecoration +{ + __apiType?: DocumentTypeDecoration['__apiType'] + private value: string + public __meta__?: Record | undefined + + constructor(value: string, __meta__?: Record | undefined) { + super(value) + this.value = value + this.__meta__ = __meta__ + } + + toString(): string & DocumentTypeDecoration { + return this.value + } +} + +export const ErrorCodeQueryDocument = new TypedDocumentString(` + query ErrorCodeQuery($code: String!, $service: Service) { + errors(code: $code, service: $service) { + nodes { + code + service + message + } + } +} + `) as unknown as TypedDocumentString diff --git a/apps/studio/data/graphql/index.ts b/apps/studio/data/graphql/index.ts new file mode 100644 index 0000000000000..f9bc8e591d00f --- /dev/null +++ b/apps/studio/data/graphql/index.ts @@ -0,0 +1,2 @@ +export * from './fragment-masking' +export * from './gql' diff --git a/apps/studio/next.config.js b/apps/studio/next.config.js index 1e836d3a29656..9d03a64c76f29 100644 --- a/apps/studio/next.config.js +++ b/apps/studio/next.config.js @@ -29,6 +29,11 @@ const SUPABASE_DOCS_PROJECT_URL = process.env.NEXT_PUBLIC_SUPABASE_URL ? new URL(process.env.NEXT_PUBLIC_SUPABASE_URL).origin : '' +// Needed to test docs content API in local dev +const SUPABASE_CONTENT_API_URL = process.env.NEXT_PUBLIC_CONTENT_API_URL + ? new URL(process.env.NEXT_PUBLIC_CONTENT_API_URL).origin + : '' + const SUPABASE_STAGING_PROJECTS_URL = 'https://*.supabase.red' const SUPABASE_STAGING_PROJECTS_URL_WS = 'wss://*.supabase.red' const SUPABASE_COM_URL = 'https://supabase.com' @@ -78,7 +83,7 @@ const csp = [ process.env.NEXT_PUBLIC_ENVIRONMENT === 'local' || process.env.NEXT_PUBLIC_ENVIRONMENT === 'staging' ? 
[ - `default-src 'self' ${DEFAULT_SRC_URLS} ${SUPABASE_STAGING_PROJECTS_URL} ${SUPABASE_STAGING_PROJECTS_URL_WS} ${VERCEL_LIVE_URL} ${PUSHER_URL_WS} ${SUPABASE_DOCS_PROJECT_URL} ${SENTRY_URL};`, + `default-src 'self' ${DEFAULT_SRC_URLS} ${SUPABASE_STAGING_PROJECTS_URL} ${SUPABASE_STAGING_PROJECTS_URL_WS} ${VERCEL_LIVE_URL} ${PUSHER_URL_WS} ${SUPABASE_DOCS_PROJECT_URL} ${SUPABASE_CONTENT_API_URL} ${SENTRY_URL};`, `script-src 'self' 'unsafe-eval' 'unsafe-inline' ${SCRIPT_SRC_URLS} ${VERCEL_LIVE_URL} ${PUSHER_URL};`, `frame-src 'self' ${FRAME_SRC_URLS} ${VERCEL_LIVE_URL};`, `img-src 'self' blob: data: ${IMG_SRC_URLS} ${SUPABASE_STAGING_PROJECTS_URL} ${VERCEL_URL};`, diff --git a/apps/studio/package.json b/apps/studio/package.json index 3363b8b050327..8a35b6160e052 100644 --- a/apps/studio/package.json +++ b/apps/studio/package.json @@ -20,7 +20,9 @@ "typecheck": "tsc --noEmit", "prettier:check": "prettier --check .", "prettier:write": "prettier --write .", - "build:deno-types": "tsx scripts/deno-types.ts" + "build:deno-types": "tsx scripts/deno-types.ts", + "build:graphql-types": "tsx scripts/download-graphql-schema.mts && pnpm graphql-codegen --config scripts/codegen.ts", + "build:graphql-types:watch": "pnpm graphql-codegen --config scripts/codegen.ts --watch" }, "dependencies": { "@ai-sdk/openai": "^0.0.72", @@ -136,6 +138,8 @@ "zxcvbn": "^4.4.2" }, "devDependencies": { + "@graphql-codegen/cli": "5.0.5", + "@graphql-typed-document-node/core": "^3.2.0", "@radix-ui/react-use-escape-keydown": "^1.0.3", "@supabase/postgres-meta": "^0.64.4", "@tailwindcss/container-queries": "^0.1.1", diff --git a/apps/studio/scripts/codegen.ts b/apps/studio/scripts/codegen.ts new file mode 100644 index 0000000000000..d60a04bda5e3e --- /dev/null +++ b/apps/studio/scripts/codegen.ts @@ -0,0 +1,17 @@ +import type { CodegenConfig } from '@graphql-codegen/cli' + +const config: CodegenConfig = { + schema: 'scripts/schema.graphql', + documents: ['data/**/*.ts'], + ignoreNoDocuments: true, + 
generates: { + 'data/graphql/': { + preset: 'client', + config: { + documentMode: 'string', + }, + }, + }, +} + +export default config diff --git a/apps/studio/scripts/download-graphql-schema.mts b/apps/studio/scripts/download-graphql-schema.mts new file mode 100644 index 0000000000000..668f9a9b9ecf2 --- /dev/null +++ b/apps/studio/scripts/download-graphql-schema.mts @@ -0,0 +1,42 @@ +import { stripIndent } from 'common-tags' +import { writeFileSync } from 'node:fs' +import path from 'node:path' +import { fileURLToPath } from 'node:url' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +async function downloadGraphQLSchema() { + const schemaEndpoint = 'https://supabase.com/docs/api/graphql' + const outputPath = path.join(__dirname, './schema.graphql') + + const schemaQuery = stripIndent` + query SchemaQuery { + schema + } + ` + + try { + const response = await fetch(schemaEndpoint, { + method: 'POST', + body: JSON.stringify({ + query: schemaQuery.trim(), + }), + }) + const { data, errors } = await response.json() + + if (errors) { + throw errors + } + + writeFileSync(outputPath, data.schema, 'utf8') + + console.log(`✅ Successfully downloaded GraphQL schema to ${outputPath}`) + } catch (error) { + console.error('🚨 Error generating GraphQL schema:', error) + process.exit(1) + } +} + +if (process.argv[1] === fileURLToPath(import.meta.url)) { + downloadGraphQLSchema() +} diff --git a/apps/studio/scripts/schema.graphql b/apps/studio/scripts/schema.graphql new file mode 100644 index 0000000000000..ea5b98f9344c4 --- /dev/null +++ b/apps/studio/scripts/schema.graphql @@ -0,0 +1,237 @@ +schema { + query: RootQueryType +} + +""" +A document containing content from the Supabase docs. This is a guide, which might describe a concept, or explain the steps for using or implementing a feature. 
+""" +type Guide implements SearchResult { + """The title of the document""" + title: String + + """The URL of the document""" + href: String + + """ + The full content of the document, including all subsections (both those matching and not matching any query string) and possibly more content + """ + content: String + + """ + The subsections of the document. If the document is returned from a search match, only matching content chunks are returned. For the full content of the original document, use the content field in the parent Guide. + """ + subsections: SubsectionCollection +} + +"""Document that matches a search query""" +interface SearchResult { + """The title of the matching result""" + title: String + + """The URL of the matching result""" + href: String + + """The full content of the matching result""" + content: String +} + +""" +A collection of content chunks from a larger document in the Supabase docs. +""" +type SubsectionCollection { + """A list of edges containing nodes in this collection""" + edges: [SubsectionEdge!]! + + """The nodes in this collection, directly accessible""" + nodes: [Subsection!]! + + """The total count of items available in this collection""" + totalCount: Int! +} + +"""An edge in a collection of Subsections""" +type SubsectionEdge { + """The Subsection at the end of the edge""" + node: Subsection! 
+} + +"""A content chunk taken from a larger document in the Supabase docs""" +type Subsection { + """The title of the subsection""" + title: String + + """The URL of the subsection""" + href: String + + """The content of the subsection""" + content: String +} + +""" +A reference document containing a description of a Supabase CLI command +""" +type CLICommandReference implements SearchResult { + """The title of the document""" + title: String + + """The URL of the document""" + href: String + + """The content of the reference document, as text""" + content: String +} + +""" +A reference document containing a description of a function from a Supabase client library +""" +type ClientLibraryFunctionReference implements SearchResult { + """The title of the document""" + title: String + + """The URL of the document""" + href: String + + """The content of the reference document, as text""" + content: String + + """The programming language for which the function is written""" + language: Language! + + """The name of the function or method""" + methodName: String +} + +enum Language { + JAVASCRIPT + SWIFT + DART + CSHARP + KOTLIN + PYTHON +} + +"""A document describing how to troubleshoot an issue when using Supabase""" +type TroubleshootingGuide implements SearchResult { + """The title of the troubleshooting guide""" + title: String + + """The URL of the troubleshooting guide""" + href: String + + """The full content of the troubleshooting guide""" + content: String +} + +type RootQueryType { + """Get the GraphQL schema for this endpoint""" + schema: String! 
+ + """Search the Supabase docs for content matching a query string""" + searchDocs(query: String!, limit: Int): SearchResultCollection + + """Get the details of an error code returned from a Supabase service""" + error(code: String!, service: Service!): Error + + """Get error codes that can potentially be returned by Supabase services""" + errors( + """Returns the first n elements from the list""" + first: Int + + """Returns elements that come after the specified cursor""" + after: String + + """Returns the last n elements from the list""" + last: Int + + """Returns elements that come before the specified cursor""" + before: String + + """Filter errors by a specific Supabase service""" + service: Service + + """Filter errors by a specific error code""" + code: String + ): ErrorCollection +} + +"""A collection of search results containing content from Supabase docs""" +type SearchResultCollection { + """A list of edges containing nodes in this collection""" + edges: [SearchResultEdge!]! + + """The nodes in this collection, directly accessible""" + nodes: [SearchResult!]! + + """The total count of items available in this collection""" + totalCount: Int! +} + +"""An edge in a collection of SearchResults""" +type SearchResultEdge { + """The SearchResult at the end of the edge""" + node: SearchResult! +} + +"""An error returned by a Supabase service""" +type Error { + """ + The unique code identifying the error. The code is stable, and can be used for string matching during error handling. + """ + code: String! + + """The Supabase service that returns this error.""" + service: Service! + + """The HTTP status code returned with this error.""" + httpStatusCode: Int + + """ + A human-readable message describing the error. The message is not stable, and should not be used for string matching during error handling. Use the code instead. 
+ """ + message: String +} + +enum Service { + AUTH + REALTIME + STORAGE +} + +"""A collection of Errors""" +type ErrorCollection { + """A list of edges containing nodes in this collection""" + edges: [ErrorEdge!]! + + """The nodes in this collection, directly accessible""" + nodes: [Error!]! + + """Pagination information""" + pageInfo: PageInfo! + + """The total count of items available in this collection""" + totalCount: Int! +} + +"""An edge in a collection of Errors""" +type ErrorEdge { + """The Error at the end of the edge""" + node: Error! + + """A cursor for use in pagination""" + cursor: String! +} + +"""Pagination information for a collection""" +type PageInfo { + """Whether there are more items after the current page""" + hasNextPage: Boolean! + + """Whether there are more items before the current page""" + hasPreviousPage: Boolean! + + """Cursor pointing to the start of the current page""" + startCursor: String + + """Cursor pointing to the end of the current page""" + endCursor: String +} \ No newline at end of file diff --git a/apps/www/_blog/2025-06-25-natural-db.mdx b/apps/www/_blog/2025-06-25-natural-db.mdx new file mode 100644 index 0000000000000..cfd92de2e621b --- /dev/null +++ b/apps/www/_blog/2025-06-25-natural-db.mdx @@ -0,0 +1,256 @@ +--- +title: 'Build a Personalized AI Assistant with Postgres' +description: 'Learn how to build a Supabase powered AI assistant that combines PostgreSQL with scheduling and external tools for long-term memory, structured data management and autonomous actions.' +categories: + - product +tags: + - postgres + - ai + - personal-assistant +date: '2025-06-25:00:00' +toc_depth: 3 +author: saxon_fletcher +image: 2025-06-10-natural-db/og.png +thumb: 2025-06-10-natural-db/thumb.png +--- + +Large Language Models are excellent at transforming unstructured text into structured data, but they face challenges when it comes to accurately retrieving that data over extended conversations. 
In this post, we'll leverage this core strength and combine it with Postgres, along with several complementary tools, to build a personalized AI assistant capable of long-term memory retention. + +At a high level, the system's flexibility is created by combining these core building blocks: An LLM owned database schema through an execute_sql tool, scheduled tasks for autonomy, web searches for real-time information, and MCP integrations for extended actions that may integrate with external tools. + +See it at work in the video below. + + + +## Core Pieces + +### Scoped Database Control + +The assistant uses a dedicated Postgres schema called `memories` to store all of its structured data. To ensure security, the LLM operates under a specific role, `memories_role`, which is granted permissions only within this schema. + +- **Scoped Schema**: The LLM can create tables, store data, and perform operations exclusively within the `memories` schema by calling an execute_sql tool +- **System Table Protection**: All other schemas, including `public`, are inaccessible to the LLM. + +### Messages Context + +Three complementary memory types maintain conversation continuity: + +- **Message History (Short-term Memory)**: Maintains a chronological list of recent messages for immediate context +- **Semantic Memory (Vector Search using pgvector)**: Stores conversation embeddings using pgvector for fuzzy concept retrieval ("that productivity thing we talked about last month") +- **Structured Memory (SQL Data)**: Stores concrete facts in LLM-created tables for precise queries ("How much did I spend on coffee last quarter?") + +### Scheduled Prompts + +The system achieves autonomy through scheduled prompts which are powered by pg_cron through a dedicated tool. Scheduled prompts call the same edge functions as a normal prompt via pg_net and can therefore use all the same tools. + +**Example**: "Every Sunday at 6 PM, analyze my portfolio performance and research market trends" + +1. 
A cron job executes the prompt every Sunday at 6 PM. +2. The LLM retrieves data from relevant tables in your memories schema, like current portfolio holdings. +3. Web search is triggered to find relevant market news and competitor analysis based on data +4. Web search results are transformed into structured data and stored in your database +5. The LLM sends a personalized email report using Zapier MCP. +6. Future queries like "How has my portfolio performed compared to market trends?" reference this data + +### Web Search + +The system leverages built-in web search capabilities from LLMs like OpenAI's web search tool to access real-time information and current events. + +```sql +-- Auto-generated from web search results +CREATE TABLE research_findings ( + topic TEXT, + source_url TEXT, + key_insights TEXT[], + credibility_score INTEGER, + search_date TIMESTAMPTZ DEFAULT NOW() +); +``` + +### Zapier MCP Integration + +Through Zapier's MCP integration, your assistant can: + +- Read/send emails (Gmail) +- Manage calendar events +- Update spreadsheets +- Send notifications (Slack, Discord, SMS) +- Create tasks (Trello, Asana, Notion) +- Control smart home devices + +### Input/Output Integration + +The system uses a Telegram Bot as the default interface which calls an edge function via webhook. You can change this to whatever interface you want, for example a web page, voice or other. + +### Self-Evolving System Prompt + +The assistant maintains two behavioral layers: + +- **Base Behavior**: Core functionality (database operations, scheduling, web search) remains consistent via a constant system prompt +- **Personalized Behavior**: Communication style and preferences that evolve based on user feedback which can be changed via a dedicated tool and stored in a public.system_prompts table + +When you say "be more formal" or "address me by name," these preferences are stored with version history and persist across all conversations, creating a personalized experience.
+ +## Use Cases + +### Run Tracking + +![Run tracking dashboard showing activity history and statistics](/images/blog/2025-06-10-natural-db/runs.png) + +**Prompt**: "Help me track my daily runs by sending me a reminder each morning with details on my previous day's run" + +1. LLM creates a `runs` table to store distance, duration, route, weather conditions, and personal notes for each run +2. LLM also creates a cron job that fires daily +3. Every morning a scheduled prompt is sent which triggers the LLM to query the runs table and send off a run reminder via Telegram with details +4. User submits run details via Telegram which is stored in the runs table +5. Opportunity for a monthly cron job that summarizes running patterns, highlights achievements, and suggests training adjustments based on progress + +### Personal Recipe & Meal Planning + +**Prompt**: "Help me track my meals and suggest recipes based on what I have in my kitchen" + +1. LLM creates `recipes`, `ingredients`, `meal_history`, and `meal_ratings` tables to store cooking experiences, dietary preferences, and meal satisfaction +2. LLM also creates a cron job that fires daily +3. Every morning a scheduled prompt is sent which triggers the LLM to query the meal_history table and suggest recipes based on available ingredients via Telegram +4. User submits meal details and ratings via Telegram which is stored in the meal_history and meal_ratings tables +5. Opportunity for a weekly cron job that analyzes cooking patterns, suggests grocery lists, and recommends new recipes based on preferences + +### Company Feedback Analysis + +**Prompt**: "Help me track customer feedback by analyzing support tickets daily and giving me weekly summaries" + +1. LLM creates a `feedback` table to store ticket analysis, themes, sentiment scores, and product areas +2. LLM also creates a cron job that fires daily +3.
Every morning a scheduled prompt is sent which triggers the LLM to fetch new tickets via MCP, analyze them, and store findings in the feedback table +4. User receives daily feedback alerts via Telegram with key insights and ticket summaries +5. Opportunity for a weekly cron job that generates comprehensive feedback reports, highlighting trends and actionable insights + +### Interest-Based Article Bookmarker + +**Prompt**: "Help me track interesting articles about AI and climate change, reminding me of important ones I haven't read" + +1. LLM creates an `articles` table to store article metadata, read status, relevance scores, and user interests +2. LLM also creates a cron job that fires daily +3. Every morning a scheduled prompt is sent which triggers the LLM to search for new articles via web search, analyze relevance, and store them in the articles table +4. User receives daily article recommendations via Telegram with personalized reading suggestions +5. Opportunity for a weekly cron job that summarizes reading patterns, highlights must-read articles, and suggests new topics based on interests + +## Implementation Guide + +### Prerequisites + +- Supabase account (free tier sufficient) +- OpenAI API key +- Telegram bot token +- Zapier account (optional) + +### Optional: Using the CLI + +If you prefer the command line, you can use the Supabase CLI to set up your database and Edge Functions. This replaces **Step 1** and **Step 2**. + +1. **Clone the repository**. + ```bash + git clone https://github.com/supabase-community/natural-db.git + cd natural-db + ``` +2. **Log in to the Supabase CLI and link your project**. + Create a new project on the [Supabase Dashboard](https://supabase.com/dashboard), then run: + ```bash + supabase login + supabase link --project-ref + ``` +3. **Push the database schema**. + ```bash + supabase db push + ``` +4. **Deploy Edge Functions**. 
+ ```bash + supabase functions deploy --no-verify-jwt + ``` + +After completing these steps, you can proceed to **Step 3: Telegram Bot**. + +### Step 1: Database Setup + +Run the migration SQL in your Supabase SQL editor: [migration.sql](https://github.com/supabase-community/natural-db/blob/main/supabase/migrations/001_create_initial_schema.sql) + +- Sets up required extensions like `pgvector` and `pg_cron`. +- Creates the `memories` schema for the assistant's data. +- Creates the `memories_role` with scoped permissions to the `memories` schema. +- Configures cron job scheduling. + +### Step 2: Edge Functions + +Create three functions in Supabase dashboard: + +**natural-db**: Main AI brain handling all processing, database operations, scheduling, and tool integration + +- [natural-db/index.ts](https://github.com/supabase-community/natural-db/blob/main/supabase/functions/natural-db/index.ts) +- [natural-db/db-utils.ts](https://github.com/supabase-community/natural-db/blob/main/supabase/functions/natural-db/db-utils.ts) +- [natural-db/tools.ts](https://github.com/supabase-community/natural-db/blob/main/supabase/functions/natural-db/tools.ts) + +**telegram-input**: Webhook handler for incoming messages with user validation and timezone management + +- [telegram-input/index.ts](https://github.com/supabase-community/natural-db/blob/main/supabase/functions/telegram-input/index.ts) + +**telegram-outgoing**: Response formatter and delivery handler with error management + +- [telegram-outgoing/index.ts](https://github.com/supabase-community/natural-db/blob/main/supabase/functions/telegram-outgoing/index.ts) + +### Step 3: Telegram Bot + +1. Create bot via [@BotFather](https://t.me/botfather) +2. 
Set webhook: `https://api.telegram.org/bot[TOKEN]/setWebhook?url=https://[PROJECT].supabase.co/functions/v1/telegram-input` + +### Step 4: Environment Variables + +Set the following environment variables in your Supabase project settings (Project Settings → Edge Functions): + +##### Required Variables: + +- `OPENAI_API_KEY`: Your OpenAI API key +- `TELEGRAM_BOT_TOKEN`: Bot token from @BotFather +- `ALLOWED_USERNAMES`: Comma-separated list of allowed Telegram usernames +- `TELEGRAM_WEBHOOK_SECRET`: Secret token for webhook validation + +##### Optional Variables: + +- `OPENAI_MODEL`: OpenAI model to use (defaults to "gpt-4.1-mini") +- `ZAPIER_MCP_URL`: MCP server URL for Zapier integrations + +### Step 5: Test Integration + +Try these commands with your bot: + +- "Store my grocery budget as $400 monthly" +- "What's the weather today?" (web search) +- "Remind me to exercise every Monday at 7 AM" +- "Be more enthusiastic when I discuss hobbies" (personality) + +## Cost Considerations + +Based on 10 messages per day (300 messages/month): + +- **Supabase**: Free tier (500MB database, 5GB bandwidth) - $0/month +- **OpenAI GPT-4.1-mini**: $0.40 per 1M input tokens, $1.60 per 1M output tokens + - Average 1200 input + 800 output tokens per message + - Input: 300 messages × 1200 tokens × $0.40/1M = $0.144/month + - Output: 300 messages × 800 tokens × $1.60/1M = $0.384/month + - Total OpenAI: $0.53/month +- **Telegram**: Free API usage +- **Zapier**: Free tier (300 tasks/month) - $0/month +- **Vector Embeddings**: $0.02 per 1M tokens (text-embedding-3-small) + - 300 messages × 1200 tokens × $0.02/1M = $0.0072/month + +**Total monthly cost: ~$0.54** + +## Make it your own + +This project showcases how combining modular components—with LLMs as just one piece—can create systems that are greater than the sum of their parts. I hope this inspires you to build and deploy your own personalized AI assistant while maintaining full control over your code and data. 
For additional inspiration, check out [this excellent post by Geoffrey Litt](https://www.geoffreylitt.com/2025/04/12/how-i-made-a-useful-ai-assistant-with-one-sqlite-table-and-a-handful-of-cron-jobs). + +Ready to build your own AI assistant? Check out the [GitHub repository](https://github.com/supabase-community/natural-db) to get started, contribute improvements, or share your own use cases. diff --git a/apps/www/public/images/blog/2025-06-10-natural-db/og.png b/apps/www/public/images/blog/2025-06-10-natural-db/og.png new file mode 100644 index 0000000000000..d6e3d2c565860 Binary files /dev/null and b/apps/www/public/images/blog/2025-06-10-natural-db/og.png differ diff --git a/apps/www/public/images/blog/2025-06-10-natural-db/runs.png b/apps/www/public/images/blog/2025-06-10-natural-db/runs.png new file mode 100644 index 0000000000000..be5b33a30966a Binary files /dev/null and b/apps/www/public/images/blog/2025-06-10-natural-db/runs.png differ diff --git a/apps/www/public/images/blog/2025-06-10-natural-db/thumb.png b/apps/www/public/images/blog/2025-06-10-natural-db/thumb.png new file mode 100644 index 0000000000000..d6e3d2c565860 Binary files /dev/null and b/apps/www/public/images/blog/2025-06-10-natural-db/thumb.png differ diff --git a/apps/www/public/rss.xml b/apps/www/public/rss.xml index 368398bf2ad8e..08f5d8d9f209f 100644 --- a/apps/www/public/rss.xml +++ b/apps/www/public/rss.xml @@ -5,9 +5,16 @@ https://supabase.com Latest news from Supabase en - Tue, 10 Jun 2025 00:00:00 -0700 + Wed, 25 Jun 2025 00:00:00 -0700 + https://supabase.com/blog/natural-db + Build a Personalized AI Assistant with Postgres + https://supabase.com/blog/natural-db + Learn how to build a Supabase powered AI assistant that combines PostgreSQL with scheduling and external tools for long-term memory, structured data management and autonomous actions. 
+ Wed, 25 Jun 2025 00:00:00 -0700 + + https://supabase.com/blog/multigres-vitess-for-postgres Announcing Multigres: Vitess for Postgres https://supabase.com/blog/multigres-vitess-for-postgres diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e6bec8f8f350d..8841ae78d9684 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -985,6 +985,12 @@ importers: specifier: ^4.4.2 version: 4.4.2 devDependencies: + '@graphql-codegen/cli': + specifier: 5.0.5 + version: 5.0.5(@parcel/watcher@2.5.1)(@types/node@22.13.14)(encoding@0.1.13)(graphql-sock@1.0.1(graphql@16.10.0))(graphql@16.10.0)(supports-color@8.1.1)(typescript@5.5.2) + '@graphql-typed-document-node/core': + specifier: ^3.2.0 + version: 3.2.0(graphql@16.10.0) '@radix-ui/react-use-escape-keydown': specifier: ^1.0.3 version: 1.1.0(@types/react@18.3.3)(react@18.3.1) @@ -12611,9 +12617,6 @@ packages: resolution: {integrity: sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==} hasBin: true - jose@5.2.1: - resolution: {integrity: sha512-qiaQhtQRw6YrOaOj0v59h3R6hUY9NvxBmmnMfKemkqYmBB0tEc97NbLP7ix44VP5p9/0YHG8Vyhzuo5YBNwviA==} - jose@5.9.6: resolution: {integrity: sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==} @@ -20381,12 +20384,12 @@ snapshots: '@whatwg-node/fetch': 0.10.6 chalk: 4.1.2 debug: 4.4.0(supports-color@8.1.1) - dotenv: 16.4.7 + dotenv: 16.5.0 graphql: 16.10.0 graphql-request: 6.1.0(encoding@0.1.13)(graphql@16.10.0) http-proxy-agent: 7.0.2(supports-color@8.1.1) https-proxy-agent: 7.0.6(supports-color@8.1.1) - jose: 5.2.1 + jose: 5.9.6 js-yaml: 4.1.0 lodash: 4.17.21 scuid: 1.1.0 @@ -31464,8 +31467,6 @@ snapshots: jiti@2.4.2: {} - jose@5.2.1: {} - jose@5.9.6: {} jotai@2.8.1(@types/react@18.3.3)(react@18.3.1): diff --git a/turbo.json b/turbo.json index 44a41baf267de..4002332fed4c8 100644 --- a/turbo.json +++ b/turbo.json @@ -45,6 +45,7 @@ "env": [ "ANALYZE", "NEXT_PUBLIC_SUPPORT_API_URL", + "NEXT_PUBLIC_CONTENT_API_URL", 
"NEXT_PUBLIC_BASE_PATH", "NEXT_PUBLIC_STRIPE_PUBLIC_KEY", "NEXT_PUBLIC_SUPPORT_ANON_KEY",