diff --git a/nix/tests/expected/docs-array-test.out b/nix/tests/expected/docs-array-test.out new file mode 100644 index 000000000..5b27d9ba5 --- /dev/null +++ b/nix/tests/expected/docs-array-test.out @@ -0,0 +1,19 @@ +-- testing sql found in https://supabase.com/docs/guides/database/arrays +create table arraytest ( + id integer not null, + textarray text array +); +INSERT INTO arraytest (id, textarray) VALUES (1, ARRAY['Harry', 'Larry', 'Moe']);; +select * from arraytest; + id | textarray +----+------------------- + 1 | {Harry,Larry,Moe} +(1 row) + +SELECT textarray[1], array_length(textarray, 1) FROM arraytest; + textarray | array_length +-----------+-------------- + Harry | 3 +(1 row) + +drop table arraytest cascade; diff --git a/nix/tests/expected/docs-cascades-deletes.out b/nix/tests/expected/docs-cascades-deletes.out new file mode 100644 index 000000000..1f5d5f777 --- /dev/null +++ b/nix/tests/expected/docs-cascades-deletes.out @@ -0,0 +1,373 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgres/cascades-deletes +-- all of the errors produced by this file are expected +create table grandparent ( + id serial primary key, + name text +); +create table parent ( + id serial primary key, + name text, + parent_id integer references grandparent (id) + on delete cascade +); +create table child ( + id serial primary key, + name text, + father integer references parent (id) + on delete restrict +); +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); +insert into child + (id, name, father) +values + (1, 'William', 1); +select count(*) from grandparent; + count +------- + 1 +(1 row) + +select count(*) from parent; + count +------- + 2 +(1 row) + +select count(*) from child; + count +------- + 1 +(1 row) + +delete from grandparent; +ERROR: update or delete on table "parent" violates foreign 
key constraint "child_father_fkey" on table "child" +DETAIL: Key (id)=(1) is still referenced from table "child". +select count(*) from grandparent; + count +------- + 1 +(1 row) + +select count(*) from parent; + count +------- + 2 +(1 row) + +select count(*) from child; + count +------- + 1 +(1 row) + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); +ERROR: duplicate key value violates unique constraint "grandparent_pkey" +DETAIL: Key (id)=(1) already exists. +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(1) already exists. +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(2) already exists. +insert into child + (id, name, father) +values + (1, 'William', 1); +ERROR: duplicate key value violates unique constraint "child_pkey" +DETAIL: Key (id)=(1) already exists. +alter table child +drop constraint child_father_fkey; +alter table child +add constraint child_father_fkey foreign key (father) references parent (id) + on delete no action; +delete from grandparent; +ERROR: update or delete on table "parent" violates foreign key constraint "child_father_fkey" on table "child" +DETAIL: Key (id)=(1) is still referenced from table "child". +select count(*) from grandparent; + count +------- + 1 +(1 row) + +select count(*) from parent; + count +------- + 2 +(1 row) + +select count(*) from child; + count +------- + 1 +(1 row) + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); +ERROR: duplicate key value violates unique constraint "grandparent_pkey" +DETAIL: Key (id)=(1) already exists. +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(1) already exists. 
+insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(2) already exists. +insert into child + (id, name, father) +values + (1, 'William', 1); +ERROR: duplicate key value violates unique constraint "child_pkey" +DETAIL: Key (id)=(1) already exists. +alter table child +drop constraint child_father_fkey; +alter table child +add constraint child_father_fkey foreign key (father) references parent (id) + on delete no action initially deferred; +delete from grandparent; +ERROR: update or delete on table "parent" violates foreign key constraint "child_father_fkey" on table "child" +DETAIL: Key (id)=(1) is still referenced from table "child". +select count(*) from grandparent; + count +------- + 1 +(1 row) + +select count(*) from parent; + count +------- + 2 +(1 row) + +select count(*) from child; + count +------- + 1 +(1 row) + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); +ERROR: duplicate key value violates unique constraint "grandparent_pkey" +DETAIL: Key (id)=(1) already exists. +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(1) already exists. +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); +ERROR: duplicate key value violates unique constraint "parent_pkey" +DETAIL: Key (id)=(2) already exists. +insert into child + (id, name, father) +values + (1, 'William', 1); +ERROR: duplicate key value violates unique constraint "child_pkey" +DETAIL: Key (id)=(1) already exists. 
+alter table child +add column mother integer references parent (id) + on delete cascade; +update child +set mother = 2 +where id = 1; +delete from grandparent; +select count(*) from grandparent; + count +------- + 0 +(1 row) + +select count(*) from parent; + count +------- + 0 +(1 row) + +select count(*) from child; + count +------- + 0 +(1 row) + +create table test_cascade ( + id serial primary key, + name text +); +create table test_cascade_child ( + id serial primary key, + parent_id integer references test_cascade (id) on delete cascade, + name text +); +insert into test_cascade (name) values ('Parent'); +insert into test_cascade_child (parent_id, name) values (1, 'Child'); +delete from test_cascade; +select count(*) from test_cascade; + count +------- + 0 +(1 row) + +select count(*) from test_cascade_child; + count +------- + 0 +(1 row) + +create table test_restrict ( + id serial primary key, + name text +); +create table test_restrict_child ( + id serial primary key, + parent_id integer references test_restrict (id) on delete restrict, + name text +); +insert into test_restrict (name) values ('Parent'); +insert into test_restrict_child (parent_id, name) values (1, 'Child'); +delete from test_restrict; +ERROR: update or delete on table "test_restrict" violates foreign key constraint "test_restrict_child_parent_id_fkey" on table "test_restrict_child" +DETAIL: Key (id)=(1) is still referenced from table "test_restrict_child". 
+select count(*) from test_restrict; + count +------- + 1 +(1 row) + +select count(*) from test_restrict_child; + count +------- + 1 +(1 row) + +create table test_set_null ( + id serial primary key, + name text +); +create table test_set_null_child ( + id serial primary key, + parent_id integer references test_set_null (id) on delete set null, + name text +); +insert into test_set_null (name) values ('Parent'); +insert into test_set_null_child (parent_id, name) values (1, 'Child'); +delete from test_set_null; +select count(*) from test_set_null; + count +------- + 0 +(1 row) + +select count(*) from test_set_null_child; + count +------- + 1 +(1 row) + +select parent_id from test_set_null_child; + parent_id +----------- + +(1 row) + +create table test_set_default ( + id serial primary key, + name text +); +create table test_set_default_child ( + id serial primary key, + parent_id integer default 999 references test_set_default (id) on delete set default, + name text +); +insert into test_set_default (name) values ('Parent'); +insert into test_set_default_child (parent_id, name) values (1, 'Child'); +delete from test_set_default; +ERROR: insert or update on table "test_set_default_child" violates foreign key constraint "test_set_default_child_parent_id_fkey" +DETAIL: Key (parent_id)=(999) is not present in table "test_set_default". 
+select count(*) from test_set_default; + count +------- + 1 +(1 row) + +select count(*) from test_set_default_child; + count +------- + 1 +(1 row) + +select parent_id from test_set_default_child; + parent_id +----------- + 1 +(1 row) + +create table test_no_action ( + id serial primary key, + name text +); +create table test_no_action_child ( + id serial primary key, + parent_id integer references test_no_action (id) on delete no action, + name text +); +insert into test_no_action (name) values ('Parent'); +insert into test_no_action_child (parent_id, name) values (1, 'Child'); +delete from test_no_action; +ERROR: update or delete on table "test_no_action" violates foreign key constraint "test_no_action_child_parent_id_fkey" on table "test_no_action_child" +DETAIL: Key (id)=(1) is still referenced from table "test_no_action_child". +select count(*) from test_no_action; + count +------- + 1 +(1 row) + +select count(*) from test_no_action_child; + count +------- + 1 +(1 row) + +drop table if exists test_cascade_child; +drop table if exists test_cascade; +drop table if exists test_restrict_child; +drop table if exists test_restrict; +drop table if exists test_set_null_child; +drop table if exists test_set_null; +drop table if exists test_set_default_child; +drop table if exists test_set_default; +drop table if exists test_no_action_child; +drop table if exists test_no_action; +drop table if exists child; +drop table if exists parent; +drop table if exists grandparent; diff --git a/nix/tests/expected/docs-connections.out b/nix/tests/expected/docs-connections.out new file mode 100644 index 000000000..d4e6d1106 --- /dev/null +++ b/nix/tests/expected/docs-connections.out @@ -0,0 +1,26 @@ +-- testing sql found in https://supabase.com/docs/guides/database/connection-management +-- we can't test every sql statement in this doc because their results won't be deterministic +select + ssl, + datname as database, + usename as connected_role, + application_name, + query, + state 
+from pg_stat_ssl +join pg_stat_activity +on pg_stat_ssl.pid = pg_stat_activity.pid; + ssl | database | connected_role | application_name | query | state +-----+----------+----------------+-----------------------------+--------------------------------------------+-------- + f | postgres | supabase_admin | pg_regress/docs-connections | select +| active + | | | | ssl, +| + | | | | datname as database, +| + | | | | usename as connected_role, +| + | | | | application_name, +| + | | | | query, +| + | | | | state +| + | | | | from pg_stat_ssl +| + | | | | join pg_stat_activity +| + | | | | on pg_stat_ssl.pid = pg_stat_activity.pid; | +(1 row) + diff --git a/nix/tests/expected/docs-enums.out b/nix/tests/expected/docs-enums.out new file mode 100644 index 000000000..da2d06d42 --- /dev/null +++ b/nix/tests/expected/docs-enums.out @@ -0,0 +1,185 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgresenums +create type mood as enum ( + 'happy', + 'sad', + 'excited', + 'calm' +); +create table person ( + id serial primary key, + name text, + current_mood mood +); +insert into person + (name, current_mood) +values + ('Alice', 'happy'); +insert into person + (name, current_mood) +values + ('Bob', 'sad'); +insert into person + (name, current_mood) +values + ('Charlie', 'excited'); +select * +from person +where current_mood = 'sad'; + id | name | current_mood +----+------+-------------- + 2 | Bob | sad +(1 row) + +select * +from person +where current_mood = 'happy'; + id | name | current_mood +----+-------+-------------- + 1 | Alice | happy +(1 row) + +update person +set current_mood = 'excited' +where name = 'Alice'; +select * +from person +where name = 'Alice'; + id | name | current_mood +----+-------+-------------- + 1 | Alice | excited +(1 row) + +alter type mood add value 'content'; +insert into person + (name, current_mood) +values + ('David', 'content'); +select enum_range(null::mood); + enum_range +---------------------------------- + 
{happy,sad,excited,calm,content} +(1 row) + +select * +from person +where current_mood = 'content'; + id | name | current_mood +----+-------+-------------- + 4 | David | content +(1 row) + +create type status as enum ( + 'active', + 'inactive', + 'pending' +); +create table orders ( + id serial primary key, + order_number text, + status status +); +insert into orders + (order_number, status) +values + ('ORD-001', 'active'), + ('ORD-002', 'pending'), + ('ORD-003', 'inactive'); +select * +from orders +where status = 'active'; + id | order_number | status +----+--------------+-------- + 1 | ORD-001 | active +(1 row) + +update orders +set status = 'inactive' +where order_number = 'ORD-002'; +select * +from orders +where order_number = 'ORD-002'; + id | order_number | status +----+--------------+---------- + 2 | ORD-002 | inactive +(1 row) + +alter type status add value 'cancelled'; +insert into orders + (order_number, status) +values + ('ORD-004', 'cancelled'); +select enum_range(null::status); + enum_range +------------------------------------- + {active,inactive,pending,cancelled} +(1 row) + +select * +from orders +where status = 'cancelled'; + id | order_number | status +----+--------------+----------- + 4 | ORD-004 | cancelled +(1 row) + +create type priority as enum ( + 'low', + 'medium', + 'high', + 'critical' +); +create table tasks ( + id serial primary key, + title text, + priority priority +); +insert into tasks + (title, priority) +values + ('Fix bug', 'high'), + ('Update docs', 'low'), + ('Security audit', 'critical'); +select * +from tasks +where priority = 'critical'; + id | title | priority +----+----------------+---------- + 3 | Security audit | critical +(1 row) + +update tasks +set priority = 'medium' +where title = 'Update docs'; +select * +from tasks +where title = 'Update docs'; + id | title | priority +----+-------------+---------- + 2 | Update docs | medium +(1 row) + +alter type priority add value 'urgent'; +insert into tasks + (title, priority) 
+values + ('Server maintenance', 'urgent'); +select enum_range(null::priority); + enum_range +----------------------------------- + {low,medium,high,critical,urgent} +(1 row) + +select * +from tasks +where priority = 'urgent'; + id | title | priority +----+--------------------+---------- + 4 | Server maintenance | urgent +(1 row) + +drop table tasks; +drop table orders; +drop table person; +drop type priority; +drop type status; +drop type mood; diff --git a/nix/tests/expected/docs-full-text-search.out b/nix/tests/expected/docs-full-text-search.out new file mode 100644 index 000000000..476c1ca02 --- /dev/null +++ b/nix/tests/expected/docs-full-text-search.out @@ -0,0 +1,296 @@ +-- testing sql found in https://supabase.com/docs/guides/database/full-text-search +create table books ( + id serial primary key, + title text, + author text, + description text +); +insert into books + (title, author, description) +values + ( + 'The Poky Little Puppy', + 'Janette Sebring Lowrey', + 'Puppy is slower than other, bigger animals.' + ), + ('The Tale of Peter Rabbit', 'Beatrix Potter', 'Rabbit eats some vegetables.'), + ('Tootle', 'Gertrude Crampton', 'Little toy train has big dreams.'), + ( + 'Green Eggs and Ham', + 'Dr. Seuss', + 'Sam has changing food preferences and eats unusually colored food.' + ), + ( + 'Harry Potter and the Goblet of Fire', + 'J.K. Rowling', + 'Fourth year of school starts, big drama ensues.' 
+ ); +select to_tsvector('green eggs and ham'); + to_tsvector +--------------------------- + 'egg':2 'green':1 'ham':4 +(1 row) + +select to_tsvector('english', 'green eggs and ham'); + to_tsvector +--------------------------- + 'egg':2 'green':1 'ham':4 +(1 row) + +select * +from books +where title = 'Harry'; + id | title | author | description +----+-------+--------+------------- +(0 rows) + +select * +from books +where to_tsvector(title) @@ to_tsquery('Harry'); + id | title | author | description +----+-------------------------------------+--------------+------------------------------------------------- + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. +(1 row) + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('big'); + id | title | author | description +----+-------------------------------------+-------------------+------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. +(2 rows) + +select + * +from + books +where + to_tsvector(description || ' ' || title) + @@ to_tsquery('little'); + id | title | author | description +----+-----------------------+------------------------+--------------------------------------------- + 1 | The Poky Little Puppy | Janette Sebring Lowrey | Puppy is slower than other, bigger animals. + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. 
+(2 rows) + +create function title_description(books) returns text as $$ + select $1.title || ' ' || $1.description; +$$ language sql immutable; +select + * +from + books +where + to_tsvector(title_description(books.*)) + @@ to_tsquery('little'); + id | title | author | description +----+-----------------------+------------------------+--------------------------------------------- + 1 | The Poky Little Puppy | Janette Sebring Lowrey | Puppy is slower than other, bigger animals. + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. +(2 rows) + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('little & big'); + id | title | author | description +----+--------+-------------------+---------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. +(1 row) + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('little | big'); + id | title | author | description +----+-------------------------------------+-------------------+------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. +(2 rows) + +select title from books where to_tsvector(title) @@ to_tsquery('Lit:*'); + title +----------------------- + The Poky Little Puppy +(1 row) + +create or replace function search_books_by_title_prefix(prefix text) +returns setof books AS $$ +begin + return query + select * from books where to_tsvector('english', title) @@ to_tsquery(prefix || ':*'); +end; +$$ language plpgsql; +select * from search_books_by_title_prefix('Lit'); + id | title | author | description +----+-----------------------+------------------------+--------------------------------------------- + 1 | The Poky Little Puppy | Janette Sebring Lowrey | Puppy is slower than other, bigger animals. 
+(1 row) + +select * from search_books_by_title_prefix('Little+Puppy'); + id | title | author | description +----+-----------------------+------------------------+--------------------------------------------- + 1 | The Poky Little Puppy | Janette Sebring Lowrey | Puppy is slower than other, bigger animals. +(1 row) + +alter table + books +add column + fts tsvector generated always as (to_tsvector('english', description || ' ' || title)) stored; +create index books_fts on books using gin (fts); +select id, fts +from books; + id | fts +----+----------------------------------------------------------------------------------------------------------------- + 1 | 'anim':7 'bigger':6 'littl':10 'poki':9 'puppi':1,11 'slower':3 + 2 | 'eat':2 'peter':8 'rabbit':1,9 'tale':6 'veget':4 + 3 | 'big':5 'dream':6 'littl':1 'tootl':7 'toy':2 'train':3 + 4 | 'chang':3 'color':9 'eat':7 'egg':12 'food':4,10 'green':11 'ham':14 'prefer':5 'sam':1 'unusu':8 + 5 | 'big':6 'drama':7 'ensu':8 'fire':15 'fourth':1 'goblet':13 'harri':9 'potter':10 'school':4 'start':5 'year':2 +(5 rows) + +select + * +from + books +where + fts @@ to_tsquery('little & big'); + id | title | author | description | fts +----+--------+-------------------+----------------------------------+--------------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. | 'big':5 'dream':6 'littl':1 'tootl':7 'toy':2 'train':3 +(1 row) + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('big <-> dreams'); + id | title | author | description | fts +----+--------+-------------------+----------------------------------+--------------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. 
| 'big':5 'dream':6 'littl':1 'tootl':7 'toy':2 'train':3 +(1 row) + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('year <2> school'); + id | title | author | description | fts +----+-------------------------------------+--------------+-------------------------------------------------+----------------------------------------------------------------------------------------------------------------- + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. | 'big':6 'drama':7 'ensu':8 'fire':15 'fourth':1 'goblet':13 'harri':9 'potter':10 'school':4 'start':5 'year':2 +(1 row) + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('big & !little'); + id | title | author | description | fts +----+-------------------------------------+--------------+-------------------------------------------------+----------------------------------------------------------------------------------------------------------------- + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. | 'big':6 'drama':7 'ensu':8 'fire':15 'fourth':1 'goblet':13 'harri':9 'potter':10 'school':4 'start':5 'year':2 +(1 row) + +select + * +from + books +where + to_tsvector(title) @@ to_tsquery('harry & potter'); + id | title | author | description | fts +----+-------------------------------------+--------------+-------------------------------------------------+----------------------------------------------------------------------------------------------------------------- + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. 
| 'big':6 'drama':7 'ensu':8 'fire':15 'fourth':1 'goblet':13 'harri':9 'potter':10 'school':4 'start':5 'year':2 +(1 row) + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('food & !egg'); + id | title | author | description | fts +----+--------------------+-----------+--------------------------------------------------------------------+--------------------------------------------------------------------------------------------------- + 4 | Green Eggs and Ham | Dr. Seuss | Sam has changing food preferences and eats unusually colored food. | 'chang':3 'color':9 'eat':7 'egg':12 'food':4,10 'green':11 'ham':14 'prefer':5 'sam':1 'unusu':8 +(1 row) + +select + * +from + books +where + to_tsvector(title || ' ' || description) @@ to_tsquery('train & toy'); + id | title | author | description | fts +----+--------+-------------------+----------------------------------+--------------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. | 'big':5 'dream':6 'littl':1 'tootl':7 'toy':2 'train':3 +(1 row) + +select + * +from + books +where + fts @@ to_tsquery('puppy & slow'); + id | title | author | description | fts +----+-------+--------+-------------+----- +(0 rows) + +select + * +from + books +where + fts @@ to_tsquery('rabbit | peter'); + id | title | author | description | fts +----+--------------------------+----------------+------------------------------+--------------------------------------------------- + 2 | The Tale of Peter Rabbit | Beatrix Potter | Rabbit eats some vegetables. 
| 'eat':2 'peter':8 'rabbit':1,9 'tale':6 'veget':4 +(1 row) + +select + * +from + books +where + fts @@ to_tsquery('harry <-> potter'); + id | title | author | description | fts +----+-------------------------------------+--------------+-------------------------------------------------+----------------------------------------------------------------------------------------------------------------- + 5 | Harry Potter and the Goblet of Fire | J.K. Rowling | Fourth year of school starts, big drama ensues. | 'big':6 'drama':7 'ensu':8 'fire':15 'fourth':1 'goblet':13 'harri':9 'potter':10 'school':4 'start':5 'year':2 +(1 row) + +select + * +from + books +where + fts @@ to_tsquery('fourth <3> year'); + id | title | author | description | fts +----+-------+--------+-------------+----- +(0 rows) + +select + * +from + books +where + fts @@ to_tsquery('big & !drama'); + id | title | author | description | fts +----+--------+-------------------+----------------------------------+--------------------------------------------------------- + 3 | Tootle | Gertrude Crampton | Little toy train has big dreams. 
| 'big':5 'dream':6 'littl':1 'tootl':7 'toy':2 'train':3 +(1 row) + +drop function search_books_by_title_prefix(text); +drop function title_description(books); +drop table books; + diff --git a/nix/tests/expected/docs-functions.out b/nix/tests/expected/docs-functions.out new file mode 100644 index 000000000..1a2c6085c --- /dev/null +++ b/nix/tests/expected/docs-functions.out @@ -0,0 +1,230 @@ +-- testing sql found in https://supabase.com/docs/guides/database/functions +create or replace function hello_world() +returns text +language sql +as $$ + select 'hello world'; +$$; +select hello_world(); + hello_world +------------- + hello world +(1 row) + +create table planets ( + id serial primary key, + name text +); +insert into planets + (id, name) +values + (1, 'Tattoine'), + (2, 'Alderaan'), + (3, 'Kashyyyk'); +create table people ( + id serial primary key, + name text, + planet_id bigint references planets +); +insert into people + (id, name, planet_id) +values + (1, 'Anakin Skywalker', 1), + (2, 'Luke Skywalker', 1), + (3, 'Princess Leia', 2), + (4, 'Chewbacca', 3); +create or replace function get_planets() +returns setof planets +language sql +as $$ + select * from planets; +$$; +select * +from get_planets() +where id = 1; + id | name +----+---------- + 1 | Tattoine +(1 row) + +create or replace function add_planet(name text) +returns bigint +language plpgsql +as $$ +declare + new_row bigint; +begin + insert into planets(name) + values (add_planet.name) + returning id into new_row; + + return new_row; +end; +$$; +select * from add_planet('Jakku'); +ERROR: duplicate key value violates unique constraint "planets_pkey" +DETAIL: Key (id)=(1) already exists. 
+CONTEXT: SQL statement "insert into planets(name) + values (add_planet.name) + returning id" +PL/pgSQL function add_planet(text) line 5 at SQL statement +create function hello_world_definer() +returns text +language plpgsql +security definer set search_path = '' +as $$ +begin + select 'hello world'; +end; +$$; +select hello_world_definer(); +ERROR: query has no destination for result data +HINT: If you want to discard the results of a SELECT, use PERFORM instead. +CONTEXT: PL/pgSQL function public.hello_world_definer() line 3 at SQL statement +revoke execute on function public.hello_world from public; +revoke execute on function public.hello_world from anon; +grant execute on function public.hello_world to authenticated; +revoke execute on all functions in schema public from public; +revoke execute on all functions in schema public from anon, authenticated; +alter default privileges in schema public revoke execute on functions from public; +alter default privileges in schema public revoke execute on functions from anon, authenticated; +grant execute on function public.hello_world to authenticated; +create function logging_example( + log_message text, + warning_message text, + error_message text +) +returns void +language plpgsql +as $$ +begin + raise log 'logging message: %', log_message; + raise warning 'logging warning: %', warning_message; + raise exception 'logging error: %', error_message; +end; +$$; +select logging_example('LOGGED MESSAGE', 'WARNING MESSAGE', 'ERROR MESSAGE'); +WARNING: logging warning: WARNING MESSAGE +ERROR: logging error: ERROR MESSAGE +CONTEXT: PL/pgSQL function logging_example(text,text,text) line 5 at RAISE +create or replace function error_if_null(some_val text) +returns text +language plpgsql +as $$ +begin + if some_val is null then + raise exception 'some_val should not be NULL'; + end if; + return some_val; +end; +$$; +select error_if_null('not null'); + error_if_null +--------------- + not null +(1 row) + +create table 
attendance_table ( + id uuid primary key, + student text +); +insert into attendance_table (id, student) values ('123e4567-e89b-12d3-a456-426614174000', 'Harry Potter'); +create function assert_example(name text) +returns uuid +language plpgsql +as $$ +declare + student_id uuid; +begin + select + id into student_id + from attendance_table + where student = name; + + assert student_id is not null, 'assert_example() ERROR: student not found'; + + return student_id; +end; +$$; +select assert_example('Harry Potter'); + assert_example +-------------------------------------- + 123e4567-e89b-12d3-a456-426614174000 +(1 row) + +create function error_example() +returns void +language plpgsql +as $$ +begin + select * from table_that_does_not_exist; + + exception + when others then + raise exception 'An error occurred in function : %', sqlerrm; +end; +$$; +select error_example(); +ERROR: An error occurred in function : relation "table_that_does_not_exist" does not exist +CONTEXT: PL/pgSQL function error_example() line 7 at RAISE +create table some_table ( + col_1 int, + col_2 text +); +insert into some_table (col_1, col_2) values (42, 'test value'); +create or replace function advanced_example(num int default 10) +returns text +language plpgsql +as $$ +declare + var1 int := 20; + var2 text; +begin + raise log 'logging start of function call: (%)', (select now()); + + select + col_1 into var1 + from some_table + limit 1; + raise log 'logging a variable (%)', var1; + + raise log 'logging a query with a single return value(%)', (select col_1 from some_table limit 1); + + raise log 'logging an entire row as JSON (%)', (select to_jsonb(some_table.*) from some_table limit 1); + + insert into some_table (col_2) + values ('new val') + returning col_2 into var2; + + raise log 'logging a value from an INSERT (%)', var2; + + return var1 || ',' || var2; +exception + when others then + raise exception 'An error occurred in function : %', sqlerrm; +end; +$$; +select advanced_example(); + 
advanced_example +------------------ + 42,new val +(1 row) + +drop function advanced_example(int); +drop function error_example(); +drop function assert_example(text); +drop function error_if_null(text); +drop function logging_example(text, text, text); +drop function hello_world_definer(); +drop function add_planet(text); +drop function get_planets(); +drop function hello_world(); +drop table people; +drop table planets; +drop table attendance_table; +drop table some_table; +grant execute on all functions in schema public to public; +grant execute on all functions in schema public to anon, authenticated; +alter default privileges in schema public grant execute on functions to public; +alter default privileges in schema public grant execute on functions to anon, authenticated; + diff --git a/nix/tests/expected/docs-indexes.out b/nix/tests/expected/docs-indexes.out new file mode 100644 index 000000000..fbdaeab79 --- /dev/null +++ b/nix/tests/expected/docs-indexes.out @@ -0,0 +1,22 @@ +-- testing sql found in https://supabase.com/docs/guides/database/indexes +create table persons ( + id bigint generated by default as identity primary key, + age int, + height int, + weight int, + name text, + deceased boolean +); +insert into persons (age, height, weight, name, deceased) values (32, 180, 70, 'John Doe', false); +select name from persons where age = 32; + name +---------- + John Doe +(1 row) + +create index idx_persons_age on persons (age); +create index idx_living_persons_age on persons (age) where deceased is false; +create index idx_persons_age_desc on persons (age desc nulls last); +reindex index concurrently idx_persons_age; +reindex table concurrently persons; +drop table persons cascade; diff --git a/nix/tests/expected/docs-json.out b/nix/tests/expected/docs-json.out new file mode 100644 index 000000000..c77a86b74 --- /dev/null +++ b/nix/tests/expected/docs-json.out @@ -0,0 +1,52 @@ +-- testing sql found in https://supabase.com/docs/guides/database/json +create 
table books ( + id serial primary key, + title text, + author text, + metadata jsonb +); +insert into books + (title, author, metadata) +values + ( + 'The Poky Little Puppy', + 'Janette Sebring Lowrey', + '{"description":"Puppy is slower than other, bigger animals.","price":5.95,"ages":[3,6]}' + ), + ( + 'The Tale of Peter Rabbit', + 'Beatrix Potter', + '{"description":"Rabbit eats some vegetables.","price":4.49,"ages":[2,5]}' + ), + ( + 'Tootle', + 'Gertrude Crampton', + '{"description":"Little toy train has big dreams.","price":3.99,"ages":[2,5]}' + ), + ( + 'Green Eggs and Ham', + 'Dr. Seuss', + '{"description":"Sam has changing food preferences and eats unusually colored food.","price":7.49,"ages":[4,8]}' + ), + ( + 'Harry Potter and the Goblet of Fire', + 'J.K. Rowling', + '{"description":"Fourth year of school starts, big drama ensues.","price":24.95,"ages":[10,99]}' + ); +select + title, + metadata ->> 'description' as description, -- returned as text + metadata -> 'price' as price, + metadata -> 'ages' -> 0 as low_age, + metadata -> 'ages' -> 1 as high_age +from books; + title | description | price | low_age | high_age +-------------------------------------+--------------------------------------------------------------------+-------+---------+---------- + The Poky Little Puppy | Puppy is slower than other, bigger animals. | 5.95 | 3 | 6 + The Tale of Peter Rabbit | Rabbit eats some vegetables. | 4.49 | 2 | 5 + Tootle | Little toy train has big dreams. | 3.99 | 2 | 5 + Green Eggs and Ham | Sam has changing food preferences and eats unusually colored food. | 7.49 | 4 | 8 + Harry Potter and the Goblet of Fire | Fourth year of school starts, big drama ensues. 
| 24.95 | 10 | 99 +(5 rows) + +drop table books cascade; diff --git a/nix/tests/expected/docs-partitioning.out b/nix/tests/expected/docs-partitioning.out new file mode 100644 index 000000000..9ab265c17 --- /dev/null +++ b/nix/tests/expected/docs-partitioning.out @@ -0,0 +1,110 @@ +-- testing sql found in https://supabase.com/docs/guides/database/partitioning +create table sales ( + id bigint generated by default as identity, + order_date date not null, + customer_id bigint, + amount bigint, + primary key (order_date, id) +) +partition by range (order_date); +create table sales_2000_01 + partition of sales + for values from ('2000-01-01') to ('2000-02-01'); +create table sales_2000_02 + partition of sales + for values from ('2000-02-01') to ('2000-03-01'); +insert into sales (order_date, customer_id, amount) values + ('2000-01-15', 1, 100), + ('2000-01-20', 2, 200), + ('2000-02-10', 3, 150), + ('2000-02-25', 4, 300); +select * from sales where order_date >= '2000-01-01' and order_date < '2000-03-01'; + id | order_date | customer_id | amount +----+------------+-------------+-------- + 1 | 01-15-2000 | 1 | 100 + 2 | 01-20-2000 | 2 | 200 + 3 | 02-10-2000 | 3 | 150 + 4 | 02-25-2000 | 4 | 300 +(4 rows) + +select * from sales_2000_02; + id | order_date | customer_id | amount +----+------------+-------------+-------- + 3 | 02-10-2000 | 3 | 150 + 4 | 02-25-2000 | 4 | 300 +(2 rows) + +drop table sales cascade; +create table customers ( + id bigint generated by default as identity, + name text, + country text, + primary key (country, id) +) +partition by list(country); +create table customers_americas + partition of customers + for values in ('US', 'CANADA'); +create table customers_asia + partition of customers + for values in ('INDIA', 'CHINA', 'JAPAN'); +insert into customers (name, country) values + ('John Doe', 'US'), + ('Jane Smith', 'CANADA'), + ('Li Wei', 'CHINA'), + ('Priya Patel', 'INDIA'), + ('Yuki Tanaka', 'JAPAN'); +select * from customers where country in ('US', 
'CANADA'); + id | name | country +----+------------+--------- + 1 | John Doe | US + 2 | Jane Smith | CANADA +(2 rows) + +select * from customers_asia; + id | name | country +----+-------------+--------- + 3 | Li Wei | CHINA + 4 | Priya Patel | INDIA + 5 | Yuki Tanaka | JAPAN +(3 rows) + +drop table customers cascade; +create table products ( + id bigint generated by default as identity, + name text, + category text, + price bigint +) +partition by hash (id); +create table products_one + partition of products + for values with (modulus 2, remainder 1); +create table products_two + partition of products + for values with (modulus 2, remainder 0); +insert into products (name, category, price) values + ('Laptop', 'Electronics', 999), + ('Phone', 'Electronics', 599), + ('Book', 'Education', 29), + ('Chair', 'Furniture', 199); +select * from products where category = 'Electronics'; + id | name | category | price +----+--------+-------------+------- + 1 | Laptop | Electronics | 999 + 2 | Phone | Electronics | 599 +(2 rows) + +select count(*) from products_one; + count +------- + 2 +(1 row) + +select count(*) from products_two; + count +------- + 2 +(1 row) + +drop table products cascade; diff --git a/nix/tests/expected/docs-tables-and-data.out b/nix/tests/expected/docs-tables-and-data.out new file mode 100644 index 000000000..5412b55f8 --- /dev/null +++ b/nix/tests/expected/docs-tables-and-data.out @@ -0,0 +1,286 @@ +-- Test file for "Tables and Data" documentation +-- This file contains all SQL statements from https://supabase.com/docs/guides/database/tables-and-data +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); +select table_name, column_name, data_type, is_nullable, column_default +from information_schema.columns +where table_name = 'movies' +order by ordinal_position; + table_name | column_name | data_type | is_nullable | column_default 
+------------+-------------+-----------+-------------+---------------- + movies | id | bigint | NO | + movies | name | text | YES | + movies | description | text | YES | +(3 rows) + +drop table if exists movies; +create table movies ( + id bigint generated always as identity primary key +); +select column_name, data_type, is_nullable, column_default, is_identity, identity_generation +from information_schema.columns +where table_name = 'movies' and column_name = 'id'; + column_name | data_type | is_nullable | column_default | is_identity | identity_generation +-------------+-----------+-------------+----------------+-------------+--------------------- + id | bigint | NO | | YES | ALWAYS +(1 row) + +drop table if exists movies; +create table movies ( + id bigint generated by default as identity primary key +); +select column_name, data_type, is_nullable, column_default, is_identity, identity_generation +from information_schema.columns +where table_name = 'movies' and column_name = 'id'; + column_name | data_type | is_nullable | column_default | is_identity | identity_generation +-------------+-----------+-------------+----------------+-------------+--------------------- + id | bigint | NO | | YES | BY DEFAULT +(1 row) + +drop table if exists movies; +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); +insert into movies + (name, description) +values + ( + 'The Empire Strikes Back', + 'After the Rebels are brutally overpowered by the Empire on the ice planet Hoth, Luke Skywalker begins Jedi training with Yoda.' + ), + ( + 'Return of the Jedi', + 'After a daring mission to rescue Han Solo from Jabba the Hutt, the Rebels dispatch to Endor to destroy the second Death Star.' 
+ ); +select id, name, description from movies order by id; + id | name | description +----+-------------------------+-------------------------------------------------------------------------------------------------------------------------------- + 1 | The Empire Strikes Back | After the Rebels are brutally overpowered by the Empire on the ice planet Hoth, Luke Skywalker begins Jedi training with Yoda. + 2 | Return of the Jedi | After a daring mission to rescue Han Solo from Jabba the Hutt, the Rebels dispatch to Endor to destroy the second Death Star. +(2 rows) + +drop table if exists movies; +drop table if exists categories; +NOTICE: table "categories" does not exist, skipping +create table categories ( + id bigint generated always as identity primary key, + name text +); +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); +alter table movies + add column category_id bigint references categories; +select + tc.table_name, + kcu.column_name, + ccu.table_name as foreign_table_name, + ccu.column_name as foreign_column_name +from information_schema.table_constraints as tc +join information_schema.key_column_usage as kcu + on tc.constraint_name = kcu.constraint_name + and tc.table_schema = kcu.table_schema +join information_schema.constraint_column_usage as ccu + on ccu.constraint_name = tc.constraint_name + and ccu.table_schema = tc.table_schema +where tc.constraint_type = 'FOREIGN KEY' and tc.table_name='movies'; + table_name | column_name | foreign_table_name | foreign_column_name +------------+-------------+--------------------+--------------------- + movies | category_id | categories | id +(1 row) + +drop table if exists performances; +NOTICE: table "performances" does not exist, skipping +drop table if exists actors; +NOTICE: table "actors" does not exist, skipping +drop table if exists movies; +create table movies ( + id bigint generated by default as identity primary key, + name text, + 
description text +); +create table actors ( + id bigint generated by default as identity primary key, + name text +); +create table performances ( + id bigint generated by default as identity primary key, + movie_id bigint not null references movies, + actor_id bigint not null references actors +); +select table_name from information_schema.tables +where table_name in ('movies', 'actors', 'performances') +order by table_name; + table_name +-------------- + actors + movies + performances +(3 rows) + +create schema private; +select schema_name from information_schema.schemata where schema_name = 'private'; + schema_name +------------- + private +(1 row) + +drop table if exists private.salaries; +NOTICE: table "salaries" does not exist, skipping +drop table if exists actors cascade; +NOTICE: drop cascades to constraint performances_actor_id_fkey on table performances +create table actors ( + id bigint generated by default as identity primary key, + name text +); +create table private.salaries ( + id bigint generated by default as identity primary key, + salary bigint not null, + actor_id bigint not null references public.actors +); +select table_schema, table_name from information_schema.tables +where table_schema = 'private' and table_name = 'salaries'; + table_schema | table_name +--------------+------------ + private | salaries +(1 row) + +drop table if exists grades; +NOTICE: table "grades" does not exist, skipping +drop table if exists courses; +NOTICE: table "courses" does not exist, skipping +drop table if exists students; +NOTICE: table "students" does not exist, skipping +drop view if exists transcripts; +NOTICE: view "transcripts" does not exist, skipping +create table students ( + id bigint generated by default as identity primary key, + name text, + type text +); +create table courses ( + id bigint generated by default as identity primary key, + title text, + code text +); +create table grades ( + id bigint generated by default as identity primary key, + 
student_id bigint not null references students, + course_id bigint not null references courses, + result text +); +-- Insert test data +insert into students (name, type) values + ('Princess Leia', 'undergraduate'), + ('Yoda', 'graduate'), + ('Anakin Skywalker', 'graduate'); +insert into courses (title, code) values + ('Introduction to Postgres', 'PG101'), + ('Authentication Theories', 'AUTH205'), + ('Fundamentals of Supabase', 'SUP412'); +insert into grades (student_id, course_id, result) values + (1, 1, 'B+'), + (1, 3, 'A+'), + (2, 2, 'A'), + (3, 1, 'A-'), + (3, 2, 'A'), + (3, 3, 'B-'); +-- Create view +create view transcripts as + select + students.name, + students.type, + courses.title, + courses.code, + grades.result + from grades + left join students on grades.student_id = students.id + left join courses on grades.course_id = courses.id; +grant all on table transcripts to authenticated; +select name, type, title, code, result from transcripts order by name, code; + name | type | title | code | result +------------------+---------------+--------------------------+---------+-------- + Anakin Skywalker | graduate | Authentication Theories | AUTH205 | A + Anakin Skywalker | graduate | Introduction to Postgres | PG101 | A- + Anakin Skywalker | graduate | Fundamentals of Supabase | SUP412 | B- + Princess Leia | undergraduate | Introduction to Postgres | PG101 | B+ + Princess Leia | undergraduate | Fundamentals of Supabase | SUP412 | A+ + Yoda | graduate | Authentication Theories | AUTH205 | A +(6 rows) + +drop materialized view if exists transcripts_materialized; +NOTICE: materialized view "transcripts_materialized" does not exist, skipping +create materialized view transcripts_materialized as + select + students.name, + students.type, + courses.title, + courses.code, + grades.result + from + grades + left join students on grades.student_id = students.id + left join courses on grades.course_id = courses.id; +select name, type, title, code, result from 
transcripts_materialized order by name, code; + name | type | title | code | result +------------------+---------------+--------------------------+---------+-------- + Anakin Skywalker | graduate | Authentication Theories | AUTH205 | A + Anakin Skywalker | graduate | Introduction to Postgres | PG101 | A- + Anakin Skywalker | graduate | Fundamentals of Supabase | SUP412 | B- + Princess Leia | undergraduate | Introduction to Postgres | PG101 | B+ + Princess Leia | undergraduate | Fundamentals of Supabase | SUP412 | A+ + Yoda | graduate | Authentication Theories | AUTH205 | A +(6 rows) + +refresh materialized view transcripts_materialized; +select count(*) from transcripts_materialized; + count +------- + 6 +(1 row) + +drop view if exists secure_transcripts; +NOTICE: view "secure_transcripts" does not exist, skipping +create view secure_transcripts with(security_invoker=true) as ( + select name, type, title, code, result from transcripts +); +select schemaname, viewname +from pg_views +where viewname = 'secure_transcripts'; + schemaname | viewname +------------+-------------------- + public | secure_transcripts +(1 row) + +drop view if exists test_view; +NOTICE: view "test_view" does not exist, skipping +create view test_view as select 1 as test_col; +alter view test_view set (security_invoker = true); +select schemaname, viewname +from pg_views +where viewname = 'test_view'; + schemaname | viewname +------------+----------- + public | test_view +(1 row) + +drop materialized view if exists transcripts_materialized; +drop view if exists secure_transcripts; +drop view if exists transcripts; +drop view if exists test_view; +drop table if exists grades; +drop table if exists courses; +drop table if exists students; +drop table if exists private.salaries; +drop table if exists actors; +drop table if exists performances; +drop table if exists movies; +drop table if exists categories; +drop schema if exists private; diff --git a/nix/tests/expected/docs-triggers.out 
b/nix/tests/expected/docs-triggers.out new file mode 100644 index 000000000..a3a850925 --- /dev/null +++ b/nix/tests/expected/docs-triggers.out @@ -0,0 +1,217 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgres/triggers +create table employees ( + id serial primary key, + name text, + salary numeric +); +create table salary_log ( + id serial primary key, + employee_id integer, + old_salary numeric, + new_salary numeric, + created_at timestamp default now() +); +create function update_salary_log() +returns trigger +language plpgsql +as $$ +begin + insert into salary_log(employee_id, old_salary, new_salary) + values (new.id, old.salary, new.salary); + return new; +end; +$$; +create trigger salary_update_trigger +after update on employees +for each row +execute function update_salary_log(); +insert into employees (name, salary) values ('John Doe', 50000); +insert into employees (name, salary) values ('Jane Smith', 60000); +update employees set salary = 55000 where name = 'John Doe'; +select id, employee_id, old_salary, new_salary from salary_log; + id | employee_id | old_salary | new_salary +----+-------------+------------+------------ + 1 | 1 | 50000 | 55000 +(1 row) + +create table orders ( + id serial primary key, + customer_id integer, + amount numeric, + status text +); +create table customers ( + id serial primary key, + name text, + email text +); +create function before_insert_function() +returns trigger +language plpgsql +as $$ +begin + if new.amount <= 0 then + raise exception 'Order amount must be greater than 0'; + end if; + return new; +end; +$$; +create trigger before_insert_trigger +before insert on orders +for each row +execute function before_insert_function(); +create table customer_audit ( + id serial primary key, + customer_id integer, + action text, + customer_name text, + deleted_at timestamp default now() +); +create function after_delete_function() +returns trigger +language plpgsql +as $$ +begin + insert into 
customer_audit(customer_id, action, customer_name) + values (old.id, 'DELETE', old.name); + return old; +end; +$$; +create trigger after_delete_trigger +after delete on customers +for each row +execute function after_delete_function(); +insert into customers (name, email) values ('Alice Johnson', 'alice@example.com'); +insert into customers (name, email) values ('Bob Wilson', 'bob@example.com'); +insert into orders (customer_id, amount, status) values (1, 100.50, 'pending'); +insert into orders (customer_id, amount, status) values (2, 250.75, 'pending'); +delete from customers where name = 'Alice Johnson'; +select id, customer_id, action, customer_name from customer_audit; + id | customer_id | action | customer_name +----+-------------+--------+--------------- + 1 | 1 | DELETE | Alice Johnson +(1 row) + +create table trigger_events ( + id serial primary key, + trigger_name text, + operation text, + table_name text, + event_time timestamp default now() +); +create function statement_level_function() +returns trigger +language plpgsql +as $$ +begin + insert into trigger_events(trigger_name, operation, table_name) + values (tg_name, tg_op, tg_table_name); + return null; +end; +$$; +create trigger statement_level_trigger +after insert on orders +for each statement +execute function statement_level_function(); +insert into orders (customer_id, amount, status) values (2, 150.25, 'pending'); +select id, trigger_name, operation, table_name from trigger_events; + id | trigger_name | operation | table_name +----+-------------------------+-----------+------------ + 1 | statement_level_trigger | INSERT | orders +(1 row) + +create table trigger_variables_log ( + id serial primary key, + trigger_name text, + trigger_when text, + operation text, + table_name text, + table_schema text, + row_id integer, + event_time timestamp default now() +); +create function trigger_variables_example() +returns trigger +language plpgsql +as $$ +begin + if tg_op = 'INSERT' then + insert into 
trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, new.id); + elsif tg_op = 'UPDATE' then + insert into trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, new.id); + elsif tg_op = 'DELETE' then + insert into trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, old.id); + end if; + + return coalesce(new, old); +end; +$$; +create trigger variables_trigger +after insert or update or delete on employees +for each row +execute function trigger_variables_example(); +insert into employees (name, salary) values ('Charlie Brown', 45000); +update employees set salary = 47000 where name = 'Charlie Brown'; +delete from employees where name = 'Charlie Brown'; +select id, trigger_name, trigger_when, operation, table_name, table_schema, row_id from trigger_variables_log; + id | trigger_name | trigger_when | operation | table_name | table_schema | row_id +----+-------------------+--------------+-----------+------------+--------------+-------- + 1 | variables_trigger | AFTER | INSERT | employees | public | 3 + 2 | variables_trigger | AFTER | UPDATE | employees | public | 3 + 3 | variables_trigger | AFTER | DELETE | employees | public | 3 +(3 rows) + +create table high_salary_alerts ( + id serial primary key, + employee_name text, + salary numeric, + alert_time timestamp default now() +); +create function conditional_trigger_function() +returns trigger +language plpgsql +as $$ +begin + if new.salary > 100000 then + insert into high_salary_alerts(employee_name, salary) + values (new.name, new.salary); + end if; + return new; +end; +$$; +create trigger conditional_trigger +after insert or update on employees +for each row +when (new.salary > 100000) +execute 
function conditional_trigger_function(); +insert into employees (name, salary) values ('Executive', 150000); +insert into employees (name, salary) values ('Intern', 30000); +select id, employee_name, salary from high_salary_alerts; + id | employee_name | salary +----+---------------+-------- + 1 | Executive | 150000 +(1 row) + +drop trigger conditional_trigger on employees; +drop trigger variables_trigger on employees; +drop trigger statement_level_trigger on orders; +drop trigger after_delete_trigger on customers; +drop trigger before_insert_trigger on orders; +drop trigger salary_update_trigger on employees; +drop function conditional_trigger_function(); +drop function trigger_variables_example(); +drop function statement_level_function(); +drop function after_delete_function(); +drop function before_insert_function(); +drop function update_salary_log(); +drop table high_salary_alerts; +drop table trigger_variables_log; +drop table trigger_events; +drop table customer_audit; +drop table salary_log; +drop table employees; +drop table orders; +drop table customers; diff --git a/nix/tests/expected/docs-webhooks.out b/nix/tests/expected/docs-webhooks.out new file mode 100644 index 000000000..20e4b2fb1 --- /dev/null +++ b/nix/tests/expected/docs-webhooks.out @@ -0,0 +1,266 @@ +-- testing sql found in https://supabase.com/docs/guides/database/webhooks +create table profiles ( + id serial primary key, + name text, + email text, + created_at timestamp default now() +); +create table webhook_logs ( + id serial primary key, + webhook_name text, + event_type text, + table_name text, + schema_name text, + record_data jsonb, + old_record_data jsonb, + created_at timestamp default now() +); +create function webhook_handler() +returns trigger +language plpgsql +as $$ +declare + payload jsonb; +begin + if tg_op = 'INSERT' then + payload := jsonb_build_object( + 'type', 'INSERT', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', 
null + ); + elsif tg_op = 'UPDATE' then + payload := jsonb_build_object( + 'type', 'UPDATE', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ); + elsif tg_op = 'DELETE' then + payload := jsonb_build_object( + 'type', 'DELETE', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', null, + 'old_record', to_jsonb(old) + ); + end if; + + insert into webhook_logs(webhook_name, event_type, table_name, schema_name, record_data, old_record_data) + values (tg_name, tg_op, tg_table_name, tg_table_schema, payload->'record', payload->'old_record'); + + return coalesce(new, old); +end; +$$; +create trigger insert_webhook +after insert on profiles +for each row +execute function webhook_handler(); +create trigger update_webhook +after update on profiles +for each row +execute function webhook_handler(); +create trigger delete_webhook +after delete on profiles +for each row +execute function webhook_handler(); +insert into profiles (name, email) values ('John Doe', 'john@example.com'); +insert into profiles (name, email) values ('Jane Smith', 'jane@example.com'); +select id, webhook_name, event_type, table_name, schema_name, record_data - 'created_at' from webhook_logs where event_type = 'INSERT'; + id | webhook_name | event_type | table_name | schema_name | ?column? 
+----+----------------+------------+------------+-------------+-------------------------------------------------------------- + 1 | insert_webhook | INSERT | profiles | public | {"id": 1, "name": "John Doe", "email": "john@example.com"} + 2 | insert_webhook | INSERT | profiles | public | {"id": 2, "name": "Jane Smith", "email": "jane@example.com"} +(2 rows) + +update profiles set email = 'john.doe@example.com' where name = 'John Doe'; +select id, webhook_name, event_type, table_name, schema_name, record_data - 'created_at', old_record_data - 'created_at' from webhook_logs where event_type = 'UPDATE'; + id | webhook_name | event_type | table_name | schema_name | ?column? | ?column? +----+----------------+------------+------------+-------------+----------------------------------------------------------------+------------------------------------------------------------ + 3 | update_webhook | UPDATE | profiles | public | {"id": 1, "name": "John Doe", "email": "john.doe@example.com"} | {"id": 1, "name": "John Doe", "email": "john@example.com"} +(1 row) + +delete from profiles where name = 'Jane Smith'; +select id, webhook_name, event_type, table_name, schema_name, old_record_data - 'created_at' from webhook_logs where event_type = 'DELETE'; + id | webhook_name | event_type | table_name | schema_name | ?column? 
+----+----------------+------------+------------+-------------+-------------------------------------------------------------- + 4 | delete_webhook | DELETE | profiles | public | {"id": 2, "name": "Jane Smith", "email": "jane@example.com"} +(1 row) + +create table orders ( + id serial primary key, + customer_id integer, + amount numeric, + status text +); +create table webhook_requests ( + id serial primary key, + webhook_name text, + url text, + method text, + headers jsonb, + payload jsonb, + response_status integer, + response_data jsonb, + created_at timestamp default now() +); +create function mock_http_response(url text) +returns table(status integer, body jsonb) +language plpgsql +as $$ +begin + if url like '%dummyjson.com/products/1%' then + return query select 200, '{"id": 1, "title": "Essence Mascara Lash Princess", "price": 9.99, "category": "beauty"}'::jsonb; + elsif url like '%dummyjson.com/products/2%' then + return query select 200, '{"id": 2, "title": "Eyeshadow Palette with Mirror", "price": 19.99, "category": "beauty"}'::jsonb; + else + return query select 404, '{"error": "Product not found"}'::jsonb; + end if; +end; +$$; +create function http_webhook_handler() +returns trigger +language plpgsql +as $$ +declare + response_code integer; + response_body jsonb; +begin + select status, body into response_code, response_body + from mock_http_response('https://dummyjson.com/products/1'); + + insert into webhook_requests(webhook_name, url, method, headers, payload, response_status, response_data) + values ( + 'orders_webhook', + 'https://dummyjson.com/products/1', + 'GET', + '{"Content-Type": "application/json"}'::jsonb, + jsonb_build_object( + 'type', tg_op, + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ), + response_code, + response_body + ); + + return coalesce(new, old); +end; +$$; +create trigger http_webhook +after insert or update or delete on orders +for each row +execute function 
http_webhook_handler(); +insert into orders (customer_id, amount, status) values (1, 100.50, 'pending'); +insert into orders (customer_id, amount, status) values (2, 250.75, 'completed'); +select id, webhook_name, url, method, response_status, response_data from webhook_requests; + id | webhook_name | url | method | response_status | response_data +----+----------------+----------------------------------+--------+-----------------+------------------------------------------------------------------------------------------ + 1 | orders_webhook | https://dummyjson.com/products/1 | GET | 200 | {"id": 1, "price": 9.99, "title": "Essence Mascara Lash Princess", "category": "beauty"} + 2 | orders_webhook | https://dummyjson.com/products/1 | GET | 200 | {"id": 1, "price": 9.99, "title": "Essence Mascara Lash Princess", "category": "beauty"} +(2 rows) + +create table webhook_config ( + id serial primary key, + webhook_name text, + url text, + method text default 'GET', + headers jsonb default '{"Content-Type": "application/json"}'::jsonb, + timeout_ms integer default 1000 +); +insert into webhook_config (webhook_name, url, method, headers, timeout_ms) +values ('product_webhook', 'https://dummyjson.com/products/2', 'GET', '{"Content-Type": "application/json"}'::jsonb, 5000); +create function configurable_webhook_handler() +returns trigger +language plpgsql +as $$ +declare + config webhook_config%rowtype; + payload jsonb; + response_code integer; + response_body jsonb; +begin + select * into config from webhook_config where webhook_name = 'product_webhook' limit 1; + + if config.id is null then + raise exception 'Webhook configuration not found'; + end if; + + payload := jsonb_build_object( + 'type', tg_op, + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ); + + select status, body into response_code, response_body + from mock_http_response(config.url); + + insert into webhook_requests(webhook_name, url, method, 
headers, payload, response_status, response_data) + values (config.webhook_name, config.url, config.method, config.headers, payload, response_code, response_body); + + if response_code != 200 then + raise exception 'Configurable webhook failed with status: %', response_code; + end if; + + return coalesce(new, old); +end; +$$; +create trigger product_webhook +after insert on profiles +for each row +execute function configurable_webhook_handler(); +insert into profiles (name, email) values ('Alice Johnson', 'alice@example.com'); +select id, webhook_name, url, method, response_status, response_data from webhook_requests where webhook_name = 'product_webhook'; + id | webhook_name | url | method | response_status | response_data +----+-----------------+----------------------------------+--------+-----------------+------------------------------------------------------------------------------------------- + 3 | product_webhook | https://dummyjson.com/products/2 | GET | 200 | {"id": 2, "price": 19.99, "title": "Eyeshadow Palette with Mirror", "category": "beauty"} +(1 row) + +create function get_webhook_logs() +returns table ( + webhook_name text, + event_type text, + table_name text, + record_count bigint +) +language sql +as $$ + select + webhook_name, + event_type, + table_name, + count(*) as record_count + from webhook_logs + group by webhook_name, event_type, table_name + order by webhook_name, event_type; +$$; +select * from get_webhook_logs(); + webhook_name | event_type | table_name | record_count +----------------+------------+------------+-------------- + delete_webhook | DELETE | profiles | 1 + insert_webhook | INSERT | profiles | 3 + update_webhook | UPDATE | profiles | 1 +(3 rows) + +drop trigger product_webhook on profiles; +drop trigger http_webhook on orders; +drop trigger delete_webhook on profiles; +drop trigger update_webhook on profiles; +drop trigger insert_webhook on profiles; +drop function configurable_webhook_handler(); +drop function 
http_webhook_handler(); +drop function mock_http_response(text); +drop function get_webhook_logs(); +drop function webhook_handler(); +drop table webhook_config; +drop table webhook_requests; +drop table webhook_logs; +drop table orders; +drop table profiles; + diff --git a/nix/tests/sql/docs-array-test.sql b/nix/tests/sql/docs-array-test.sql new file mode 100644 index 000000000..ee81d2ce0 --- /dev/null +++ b/nix/tests/sql/docs-array-test.sql @@ -0,0 +1,14 @@ +-- testing sql found in https://supabase.com/docs/guides/database/arrays + +create table arraytest ( + id integer not null, + textarray text array +); + +INSERT INTO arraytest (id, textarray) VALUES (1, ARRAY['Harry', 'Larry', 'Moe']);; + +select * from arraytest; + +SELECT textarray[1], array_length(textarray, 1) FROM arraytest; + +drop table arraytest cascade; diff --git a/nix/tests/sql/docs-cascades-deletes.sql b/nix/tests/sql/docs-cascades-deletes.sql new file mode 100644 index 000000000..5a3f75c73 --- /dev/null +++ b/nix/tests/sql/docs-cascades-deletes.sql @@ -0,0 +1,262 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgres/cascades-deletes +-- all of the errors produced by this file are expected + +create table grandparent ( + id serial primary key, + name text +); + +create table parent ( + id serial primary key, + name text, + parent_id integer references grandparent (id) + on delete cascade +); + +create table child ( + id serial primary key, + name text, + father integer references parent (id) + on delete restrict +); + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); + +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); + +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); + +insert into child + (id, name, father) +values + (1, 'William', 1); + +select count(*) from grandparent; +select count(*) from parent; +select count(*) from child; + +delete from grandparent; + +select count(*) from grandparent; +select count(*) 
from parent; +select count(*) from child; + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); + +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); + +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); + +insert into child + (id, name, father) +values + (1, 'William', 1); + +alter table child +drop constraint child_father_fkey; + +alter table child +add constraint child_father_fkey foreign key (father) references parent (id) + on delete no action; + +delete from grandparent; + +select count(*) from grandparent; +select count(*) from parent; +select count(*) from child; + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); + +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); + +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); + +insert into child + (id, name, father) +values + (1, 'William', 1); + +alter table child +drop constraint child_father_fkey; + +alter table child +add constraint child_father_fkey foreign key (father) references parent (id) + on delete no action initially deferred; + +delete from grandparent; + +select count(*) from grandparent; +select count(*) from parent; +select count(*) from child; + +insert into grandparent + (id, name) +values + (1, 'Elizabeth'); + +insert into parent + (id, name, parent_id) +values + (1, 'Charles', 1); + +insert into parent + (id, name, parent_id) +values + (2, 'Diana', 1); + +insert into child + (id, name, father) +values + (1, 'William', 1); + +alter table child +add column mother integer references parent (id) + on delete cascade; + +update child +set mother = 2 +where id = 1; + +delete from grandparent; + +select count(*) from grandparent; +select count(*) from parent; +select count(*) from child; + +create table test_cascade ( + id serial primary key, + name text +); + +create table test_cascade_child ( + id serial primary key, + parent_id integer references test_cascade (id) on delete cascade, + name 
text +); + +insert into test_cascade (name) values ('Parent'); +insert into test_cascade_child (parent_id, name) values (1, 'Child'); + +delete from test_cascade; + +select count(*) from test_cascade; +select count(*) from test_cascade_child; + +create table test_restrict ( + id serial primary key, + name text +); + +create table test_restrict_child ( + id serial primary key, + parent_id integer references test_restrict (id) on delete restrict, + name text +); + +insert into test_restrict (name) values ('Parent'); +insert into test_restrict_child (parent_id, name) values (1, 'Child'); + +delete from test_restrict; + +select count(*) from test_restrict; +select count(*) from test_restrict_child; + +create table test_set_null ( + id serial primary key, + name text +); + +create table test_set_null_child ( + id serial primary key, + parent_id integer references test_set_null (id) on delete set null, + name text +); + +insert into test_set_null (name) values ('Parent'); +insert into test_set_null_child (parent_id, name) values (1, 'Child'); + +delete from test_set_null; + +select count(*) from test_set_null; +select count(*) from test_set_null_child; +select parent_id from test_set_null_child; + +create table test_set_default ( + id serial primary key, + name text +); + +create table test_set_default_child ( + id serial primary key, + parent_id integer default 999 references test_set_default (id) on delete set default, + name text +); + +insert into test_set_default (name) values ('Parent'); +insert into test_set_default_child (parent_id, name) values (1, 'Child'); + +delete from test_set_default; + +select count(*) from test_set_default; +select count(*) from test_set_default_child; +select parent_id from test_set_default_child; + +create table test_no_action ( + id serial primary key, + name text +); + +create table test_no_action_child ( + id serial primary key, + parent_id integer references test_no_action (id) on delete no action, + name text +); + +insert into 
test_no_action (name) values ('Parent'); +insert into test_no_action_child (parent_id, name) values (1, 'Child'); + +delete from test_no_action; + +select count(*) from test_no_action; +select count(*) from test_no_action_child; + +drop table if exists test_cascade_child; +drop table if exists test_cascade; +drop table if exists test_restrict_child; +drop table if exists test_restrict; +drop table if exists test_set_null_child; +drop table if exists test_set_null; +drop table if exists test_set_default_child; +drop table if exists test_set_default; +drop table if exists test_no_action_child; +drop table if exists test_no_action; +drop table if exists child; +drop table if exists parent; +drop table if exists grandparent; diff --git a/nix/tests/sql/docs-connections.sql b/nix/tests/sql/docs-connections.sql new file mode 100644 index 000000000..500d53ea6 --- /dev/null +++ b/nix/tests/sql/docs-connections.sql @@ -0,0 +1,12 @@ +-- testing sql found in https://supabase.com/docs/guides/database/connection-management +-- we can't test every sql statement in this doc because their results won't be deterministic +select + ssl, + datname as database, + usename as connected_role, + application_name, + query, + state +from pg_stat_ssl +join pg_stat_activity +on pg_stat_ssl.pid = pg_stat_activity.pid; diff --git a/nix/tests/sql/docs-enums.sql b/nix/tests/sql/docs-enums.sql new file mode 100644 index 000000000..f9b54283a --- /dev/null +++ b/nix/tests/sql/docs-enums.sql @@ -0,0 +1,154 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgresenums + +create type mood as enum ( + 'happy', + 'sad', + 'excited', + 'calm' +); + +create table person ( + id serial primary key, + name text, + current_mood mood +); + +insert into person + (name, current_mood) +values + ('Alice', 'happy'); + +insert into person + (name, current_mood) +values + ('Bob', 'sad'); + +insert into person + (name, current_mood) +values + ('Charlie', 'excited'); + +select * +from person +where 
current_mood = 'sad'; + +select * +from person +where current_mood = 'happy'; + +update person +set current_mood = 'excited' +where name = 'Alice'; + +select * +from person +where name = 'Alice'; + +alter type mood add value 'content'; + +insert into person + (name, current_mood) +values + ('David', 'content'); + +select enum_range(null::mood); + +select * +from person +where current_mood = 'content'; + +create type status as enum ( + 'active', + 'inactive', + 'pending' +); + +create table orders ( + id serial primary key, + order_number text, + status status +); + +insert into orders + (order_number, status) +values + ('ORD-001', 'active'), + ('ORD-002', 'pending'), + ('ORD-003', 'inactive'); + +select * +from orders +where status = 'active'; + +update orders +set status = 'inactive' +where order_number = 'ORD-002'; + +select * +from orders +where order_number = 'ORD-002'; + +alter type status add value 'cancelled'; + +insert into orders + (order_number, status) +values + ('ORD-004', 'cancelled'); + +select enum_range(null::status); + +select * +from orders +where status = 'cancelled'; + +create type priority as enum ( + 'low', + 'medium', + 'high', + 'critical' +); + +create table tasks ( + id serial primary key, + title text, + priority priority +); + +insert into tasks + (title, priority) +values + ('Fix bug', 'high'), + ('Update docs', 'low'), + ('Security audit', 'critical'); + +select * +from tasks +where priority = 'critical'; + +update tasks +set priority = 'medium' +where title = 'Update docs'; + +select * +from tasks +where title = 'Update docs'; + +alter type priority add value 'urgent'; + +insert into tasks + (title, priority) +values + ('Server maintenance', 'urgent'); + +select enum_range(null::priority); + +select * +from tasks +where priority = 'urgent'; + +drop table tasks; +drop table orders; +drop table person; +drop type priority; +drop type status; +drop type mood; diff --git a/nix/tests/sql/docs-full-text-search.sql 
b/nix/tests/sql/docs-full-text-search.sql new file mode 100644 index 000000000..d62521e15 --- /dev/null +++ b/nix/tests/sql/docs-full-text-search.sql @@ -0,0 +1,197 @@ +-- testing sql found in https://supabase.com/docs/guides/database/full-text-search +create table books ( + id serial primary key, + title text, + author text, + description text +); + +insert into books + (title, author, description) +values + ( + 'The Poky Little Puppy', + 'Janette Sebring Lowrey', + 'Puppy is slower than other, bigger animals.' + ), + ('The Tale of Peter Rabbit', 'Beatrix Potter', 'Rabbit eats some vegetables.'), + ('Tootle', 'Gertrude Crampton', 'Little toy train has big dreams.'), + ( + 'Green Eggs and Ham', + 'Dr. Seuss', + 'Sam has changing food preferences and eats unusually colored food.' + ), + ( + 'Harry Potter and the Goblet of Fire', + 'J.K. Rowling', + 'Fourth year of school starts, big drama ensues.' + ); + +select to_tsvector('green eggs and ham'); + +select to_tsvector('english', 'green eggs and ham'); + +select * +from books +where title = 'Harry'; + +select * +from books +where to_tsvector(title) @@ to_tsquery('Harry'); + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('big'); + +select + * +from + books +where + to_tsvector(description || ' ' || title) + @@ to_tsquery('little'); + +create function title_description(books) returns text as $$ + select $1.title || ' ' || $1.description; +$$ language sql immutable; + +select + * +from + books +where + to_tsvector(title_description(books.*)) + @@ to_tsquery('little'); + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('little & big'); + +select + * +from + books +where + to_tsvector(description) + @@ to_tsquery('little | big'); + +select title from books where to_tsvector(title) @@ to_tsquery('Lit:*'); + +create or replace function search_books_by_title_prefix(prefix text) +returns setof books AS $$ +begin + return query + select * from books where 
to_tsvector('english', title) @@ to_tsquery(prefix || ':*'); +end; +$$ language plpgsql; + +select * from search_books_by_title_prefix('Lit'); + +select * from search_books_by_title_prefix('Little+Puppy'); + +alter table + books +add column + fts tsvector generated always as (to_tsvector('english', description || ' ' || title)) stored; + +create index books_fts on books using gin (fts); + +select id, fts +from books; + +select + * +from + books +where + fts @@ to_tsquery('little & big'); + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('big <-> dreams'); + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('year <2> school'); + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('big & !little'); + +select + * +from + books +where + to_tsvector(title) @@ to_tsquery('harry & potter'); + +select + * +from + books +where + to_tsvector(description) @@ to_tsquery('food & !egg'); + +select + * +from + books +where + to_tsvector(title || ' ' || description) @@ to_tsquery('train & toy'); + +select + * +from + books +where + fts @@ to_tsquery('puppy & slow'); + +select + * +from + books +where + fts @@ to_tsquery('rabbit | peter'); + +select + * +from + books +where + fts @@ to_tsquery('harry <-> potter'); + +select + * +from + books +where + fts @@ to_tsquery('fourth <3> year'); + +select + * +from + books +where + fts @@ to_tsquery('big & !drama'); + +drop function search_books_by_title_prefix(text); +drop function title_description(books); +drop table books; + \ No newline at end of file diff --git a/nix/tests/sql/docs-functions.sql b/nix/tests/sql/docs-functions.sql new file mode 100644 index 000000000..f91f17261 --- /dev/null +++ b/nix/tests/sql/docs-functions.sql @@ -0,0 +1,225 @@ +-- testing sql found in https://supabase.com/docs/guides/database/functions + +create or replace function hello_world() +returns text +language sql +as $$ + select 'hello world'; +$$; + +select hello_world(); + +create 
table planets ( + id serial primary key, + name text +); + +insert into planets + (id, name) +values + (1, 'Tattoine'), + (2, 'Alderaan'), + (3, 'Kashyyyk'); + +create table people ( + id serial primary key, + name text, + planet_id bigint references planets +); + +insert into people + (id, name, planet_id) +values + (1, 'Anakin Skywalker', 1), + (2, 'Luke Skywalker', 1), + (3, 'Princess Leia', 2), + (4, 'Chewbacca', 3); + +create or replace function get_planets() +returns setof planets +language sql +as $$ + select * from planets; +$$; + +select * +from get_planets() +where id = 1; + +create or replace function add_planet(name text) +returns bigint +language plpgsql +as $$ +declare + new_row bigint; +begin + insert into planets(name) + values (add_planet.name) + returning id into new_row; + + return new_row; +end; +$$; + +select * from add_planet('Jakku'); + +create function hello_world_definer() +returns text +language plpgsql +security definer set search_path = '' +as $$ +begin + select 'hello world'; +end; +$$; + +select hello_world_definer(); + +revoke execute on function public.hello_world from public; +revoke execute on function public.hello_world from anon; + +grant execute on function public.hello_world to authenticated; + +revoke execute on all functions in schema public from public; +revoke execute on all functions in schema public from anon, authenticated; + +alter default privileges in schema public revoke execute on functions from public; +alter default privileges in schema public revoke execute on functions from anon, authenticated; + +grant execute on function public.hello_world to authenticated; + +create function logging_example( + log_message text, + warning_message text, + error_message text +) +returns void +language plpgsql +as $$ +begin + raise log 'logging message: %', log_message; + raise warning 'logging warning: %', warning_message; + raise exception 'logging error: %', error_message; +end; +$$; + +select logging_example('LOGGED MESSAGE', 
'WARNING MESSAGE', 'ERROR MESSAGE'); + +create or replace function error_if_null(some_val text) +returns text +language plpgsql +as $$ +begin + if some_val is null then + raise exception 'some_val should not be NULL'; + end if; + return some_val; +end; +$$; + +select error_if_null('not null'); + +create table attendance_table ( + id uuid primary key, + student text +); + +insert into attendance_table (id, student) values ('123e4567-e89b-12d3-a456-426614174000', 'Harry Potter'); + +create function assert_example(name text) +returns uuid +language plpgsql +as $$ +declare + student_id uuid; +begin + select + id into student_id + from attendance_table + where student = name; + + assert student_id is not null, 'assert_example() ERROR: student not found'; + + return student_id; +end; +$$; + +select assert_example('Harry Potter'); + +create function error_example() +returns void +language plpgsql +as $$ +begin + select * from table_that_does_not_exist; + + exception + when others then + raise exception 'An error occurred in function : %', sqlerrm; +end; +$$; + +select error_example(); + +create table some_table ( + col_1 int, + col_2 text +); + +insert into some_table (col_1, col_2) values (42, 'test value'); + +create or replace function advanced_example(num int default 10) +returns text +language plpgsql +as $$ +declare + var1 int := 20; + var2 text; +begin + raise log 'logging start of function call: (%)', (select now()); + + select + col_1 into var1 + from some_table + limit 1; + raise log 'logging a variable (%)', var1; + + raise log 'logging a query with a single return value(%)', (select col_1 from some_table limit 1); + + raise log 'logging an entire row as JSON (%)', (select to_jsonb(some_table.*) from some_table limit 1); + + insert into some_table (col_2) + values ('new val') + returning col_2 into var2; + + raise log 'logging a value from an INSERT (%)', var2; + + return var1 || ',' || var2; +exception + when others then + raise exception 'An error occurred in 
function : %', sqlerrm; +end; +$$; + +select advanced_example(); + +drop function advanced_example(int); +drop function error_example(); +drop function assert_example(text); +drop function error_if_null(text); +drop function logging_example(text, text, text); +drop function hello_world_definer(); +drop function add_planet(text); +drop function get_planets(); +drop function hello_world(); +drop table people; +drop table planets; +drop table attendance_table; +drop table some_table; + +grant execute on all functions in schema public to public; +grant execute on all functions in schema public to anon, authenticated; + +alter default privileges in schema public grant execute on functions to public; +alter default privileges in schema public grant execute on functions to anon, authenticated; + \ No newline at end of file diff --git a/nix/tests/sql/docs-indexes.sql b/nix/tests/sql/docs-indexes.sql new file mode 100644 index 000000000..9d874adb0 --- /dev/null +++ b/nix/tests/sql/docs-indexes.sql @@ -0,0 +1,26 @@ +-- testing sql found in https://supabase.com/docs/guides/database/indexes + +create table persons ( + id bigint generated by default as identity primary key, + age int, + height int, + weight int, + name text, + deceased boolean +); + +insert into persons (age, height, weight, name, deceased) values (32, 180, 70, 'John Doe', false); + +select name from persons where age = 32; + +create index idx_persons_age on persons (age); + +create index idx_living_persons_age on persons (age) where deceased is false; + +create index idx_persons_age_desc on persons (age desc nulls last); + +reindex index concurrently idx_persons_age; + +reindex table concurrently persons; + +drop table persons cascade; diff --git a/nix/tests/sql/docs-json.sql b/nix/tests/sql/docs-json.sql new file mode 100644 index 000000000..49b62338c --- /dev/null +++ b/nix/tests/sql/docs-json.sql @@ -0,0 +1,47 @@ +-- testing sql found in https://supabase.com/docs/guides/database/json + +create table books ( 
+ id serial primary key, + title text, + author text, + metadata jsonb +); + +insert into books + (title, author, metadata) +values + ( + 'The Poky Little Puppy', + 'Janette Sebring Lowrey', + '{"description":"Puppy is slower than other, bigger animals.","price":5.95,"ages":[3,6]}' + ), + ( + 'The Tale of Peter Rabbit', + 'Beatrix Potter', + '{"description":"Rabbit eats some vegetables.","price":4.49,"ages":[2,5]}' + ), + ( + 'Tootle', + 'Gertrude Crampton', + '{"description":"Little toy train has big dreams.","price":3.99,"ages":[2,5]}' + ), + ( + 'Green Eggs and Ham', + 'Dr. Seuss', + '{"description":"Sam has changing food preferences and eats unusually colored food.","price":7.49,"ages":[4,8]}' + ), + ( + 'Harry Potter and the Goblet of Fire', + 'J.K. Rowling', + '{"description":"Fourth year of school starts, big drama ensues.","price":24.95,"ages":[10,99]}' + ); + +select + title, + metadata ->> 'description' as description, -- returned as text + metadata -> 'price' as price, + metadata -> 'ages' -> 0 as low_age, + metadata -> 'ages' -> 1 as high_age +from books; + +drop table books cascade; diff --git a/nix/tests/sql/docs-partitioning.sql b/nix/tests/sql/docs-partitioning.sql new file mode 100644 index 000000000..6037d526a --- /dev/null +++ b/nix/tests/sql/docs-partitioning.sql @@ -0,0 +1,89 @@ +-- testing sql found in https://supabase.com/docs/guides/database/partitioning + +create table sales ( + id bigint generated by default as identity, + order_date date not null, + customer_id bigint, + amount bigint, + primary key (order_date, id) +) +partition by range (order_date); + +create table sales_2000_01 + partition of sales + for values from ('2000-01-01') to ('2000-02-01'); + +create table sales_2000_02 + partition of sales + for values from ('2000-02-01') to ('2000-03-01'); + +insert into sales (order_date, customer_id, amount) values + ('2000-01-15', 1, 100), + ('2000-01-20', 2, 200), + ('2000-02-10', 3, 150), + ('2000-02-25', 4, 300); + +select * from 
sales where order_date >= '2000-01-01' and order_date < '2000-03-01'; + +select * from sales_2000_02; + +drop table sales cascade; + +create table customers ( + id bigint generated by default as identity, + name text, + country text, + primary key (country, id) +) +partition by list(country); + +create table customers_americas + partition of customers + for values in ('US', 'CANADA'); + +create table customers_asia + partition of customers + for values in ('INDIA', 'CHINA', 'JAPAN'); + +insert into customers (name, country) values + ('John Doe', 'US'), + ('Jane Smith', 'CANADA'), + ('Li Wei', 'CHINA'), + ('Priya Patel', 'INDIA'), + ('Yuki Tanaka', 'JAPAN'); + +select * from customers where country in ('US', 'CANADA'); + +select * from customers_asia; + +drop table customers cascade; + +create table products ( + id bigint generated by default as identity, + name text, + category text, + price bigint +) +partition by hash (id); + +create table products_one + partition of products + for values with (modulus 2, remainder 1); + +create table products_two + partition of products + for values with (modulus 2, remainder 0); + +insert into products (name, category, price) values + ('Laptop', 'Electronics', 999), + ('Phone', 'Electronics', 599), + ('Book', 'Education', 29), + ('Chair', 'Furniture', 199); + +select * from products where category = 'Electronics'; + +select count(*) from products_one; + +select count(*) from products_two; + +drop table products cascade; diff --git a/nix/tests/sql/docs-tables-and-data.sql b/nix/tests/sql/docs-tables-and-data.sql new file mode 100644 index 000000000..30a2a3cb9 --- /dev/null +++ b/nix/tests/sql/docs-tables-and-data.sql @@ -0,0 +1,249 @@ +-- Test file for "Tables and Data" documentation +-- This file contains all SQL statements from https://supabase.com/docs/guides/database/tables-and-data + +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); + +select table_name, 
column_name, data_type, is_nullable, column_default +from information_schema.columns +where table_name = 'movies' +order by ordinal_position; + +drop table if exists movies; +create table movies ( + id bigint generated always as identity primary key +); + +select column_name, data_type, is_nullable, column_default, is_identity, identity_generation +from information_schema.columns +where table_name = 'movies' and column_name = 'id'; + + + +drop table if exists movies; +create table movies ( + id bigint generated by default as identity primary key +); + +select column_name, data_type, is_nullable, column_default, is_identity, identity_generation +from information_schema.columns +where table_name = 'movies' and column_name = 'id'; + +drop table if exists movies; +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); + +insert into movies + (name, description) +values + ( + 'The Empire Strikes Back', + 'After the Rebels are brutally overpowered by the Empire on the ice planet Hoth, Luke Skywalker begins Jedi training with Yoda.' + ), + ( + 'Return of the Jedi', + 'After a daring mission to rescue Han Solo from Jabba the Hutt, the Rebels dispatch to Endor to destroy the second Death Star.' 
+ ); + +select id, name, description from movies order by id; + +drop table if exists movies; +drop table if exists categories; + +create table categories ( + id bigint generated always as identity primary key, + name text +); + +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); + +alter table movies + add column category_id bigint references categories; + + +select + tc.table_name, + kcu.column_name, + ccu.table_name as foreign_table_name, + ccu.column_name as foreign_column_name +from information_schema.table_constraints as tc +join information_schema.key_column_usage as kcu + on tc.constraint_name = kcu.constraint_name + and tc.table_schema = kcu.table_schema +join information_schema.constraint_column_usage as ccu + on ccu.constraint_name = tc.constraint_name + and ccu.table_schema = tc.table_schema +where tc.constraint_type = 'FOREIGN KEY' and tc.table_name='movies'; + + +drop table if exists performances; +drop table if exists actors; +drop table if exists movies; + +create table movies ( + id bigint generated by default as identity primary key, + name text, + description text +); + +create table actors ( + id bigint generated by default as identity primary key, + name text +); + +create table performances ( + id bigint generated by default as identity primary key, + movie_id bigint not null references movies, + actor_id bigint not null references actors +); + +select table_name from information_schema.tables +where table_name in ('movies', 'actors', 'performances') +order by table_name; + + + +create schema private; + +select schema_name from information_schema.schemata where schema_name = 'private'; + +drop table if exists private.salaries; +drop table if exists actors cascade; + +create table actors ( + id bigint generated by default as identity primary key, + name text +); + +create table private.salaries ( + id bigint generated by default as identity primary key, + salary bigint not null, + 
actor_id bigint not null references public.actors +); + +select table_schema, table_name from information_schema.tables +where table_schema = 'private' and table_name = 'salaries'; + +drop table if exists grades; +drop table if exists courses; +drop table if exists students; +drop view if exists transcripts; + +create table students ( + id bigint generated by default as identity primary key, + name text, + type text +); + +create table courses ( + id bigint generated by default as identity primary key, + title text, + code text +); + +create table grades ( + id bigint generated by default as identity primary key, + student_id bigint not null references students, + course_id bigint not null references courses, + result text +); + +-- Insert test data +insert into students (name, type) values + ('Princess Leia', 'undergraduate'), + ('Yoda', 'graduate'), + ('Anakin Skywalker', 'graduate'); + +insert into courses (title, code) values + ('Introduction to Postgres', 'PG101'), + ('Authentication Theories', 'AUTH205'), + ('Fundamentals of Supabase', 'SUP412'); + +insert into grades (student_id, course_id, result) values + (1, 1, 'B+'), + (1, 3, 'A+'), + (2, 2, 'A'), + (3, 1, 'A-'), + (3, 2, 'A'), + (3, 3, 'B-'); + +-- Create view +create view transcripts as + select + students.name, + students.type, + courses.title, + courses.code, + grades.result + from grades + left join students on grades.student_id = students.id + left join courses on grades.course_id = courses.id; + +grant all on table transcripts to authenticated; + +select name, type, title, code, result from transcripts order by name, code; + +drop materialized view if exists transcripts_materialized; + +create materialized view transcripts_materialized as + select + students.name, + students.type, + courses.title, + courses.code, + grades.result + from + grades + left join students on grades.student_id = students.id + left join courses on grades.course_id = courses.id; + + +select name, type, title, code, result 
from transcripts_materialized order by name, code; + +refresh materialized view transcripts_materialized; + +select count(*) from transcripts_materialized; + +drop view if exists secure_transcripts; + +create view secure_transcripts with(security_invoker=true) as ( + select name, type, title, code, result from transcripts +); + + +select schemaname, viewname +from pg_views +where viewname = 'secure_transcripts'; + +drop view if exists test_view; +create view test_view as select 1 as test_col; + +alter view test_view set (security_invoker = true); + +select schemaname, viewname +from pg_views +where viewname = 'test_view'; + +drop materialized view if exists transcripts_materialized; +drop view if exists secure_transcripts; +drop view if exists transcripts; +drop view if exists test_view; +drop table if exists grades; +drop table if exists courses; +drop table if exists students; +drop table if exists private.salaries; +drop table if exists actors; +drop table if exists performances; +drop table if exists movies; +drop table if exists categories; +drop schema if exists private; diff --git a/nix/tests/sql/docs-triggers.sql b/nix/tests/sql/docs-triggers.sql new file mode 100644 index 000000000..1f132bfae --- /dev/null +++ b/nix/tests/sql/docs-triggers.sql @@ -0,0 +1,226 @@ +-- testing sql found in https://supabase.com/docs/guides/database/postgres/triggers + +create table employees ( + id serial primary key, + name text, + salary numeric +); + +create table salary_log ( + id serial primary key, + employee_id integer, + old_salary numeric, + new_salary numeric, + created_at timestamp default now() +); + +create function update_salary_log() +returns trigger +language plpgsql +as $$ +begin + insert into salary_log(employee_id, old_salary, new_salary) + values (new.id, old.salary, new.salary); + return new; +end; +$$; + +create trigger salary_update_trigger +after update on employees +for each row +execute function update_salary_log(); + +insert into employees (name, 
salary) values ('John Doe', 50000); +insert into employees (name, salary) values ('Jane Smith', 60000); + +update employees set salary = 55000 where name = 'John Doe'; + +select id, employee_id, old_salary, new_salary from salary_log; + +create table orders ( + id serial primary key, + customer_id integer, + amount numeric, + status text +); + +create table customers ( + id serial primary key, + name text, + email text +); + +create function before_insert_function() +returns trigger +language plpgsql +as $$ +begin + if new.amount <= 0 then + raise exception 'Order amount must be greater than 0'; + end if; + return new; +end; +$$; + +create trigger before_insert_trigger +before insert on orders +for each row +execute function before_insert_function(); + +create table customer_audit ( + id serial primary key, + customer_id integer, + action text, + customer_name text, + deleted_at timestamp default now() +); + +create function after_delete_function() +returns trigger +language plpgsql +as $$ +begin + insert into customer_audit(customer_id, action, customer_name) + values (old.id, 'DELETE', old.name); + return old; +end; +$$; + +create trigger after_delete_trigger +after delete on customers +for each row +execute function after_delete_function(); + +insert into customers (name, email) values ('Alice Johnson', 'alice@example.com'); +insert into customers (name, email) values ('Bob Wilson', 'bob@example.com'); + +insert into orders (customer_id, amount, status) values (1, 100.50, 'pending'); +insert into orders (customer_id, amount, status) values (2, 250.75, 'pending'); + +delete from customers where name = 'Alice Johnson'; + +select id, customer_id, action, customer_name from customer_audit; + +create table trigger_events ( + id serial primary key, + trigger_name text, + operation text, + table_name text, + event_time timestamp default now() +); + +create function statement_level_function() +returns trigger +language plpgsql +as $$ +begin + insert into 
trigger_events(trigger_name, operation, table_name) + values (tg_name, tg_op, tg_table_name); + return null; +end; +$$; + +create trigger statement_level_trigger +after insert on orders +for each statement +execute function statement_level_function(); + +insert into orders (customer_id, amount, status) values (2, 150.25, 'pending'); + +select id, trigger_name, operation, table_name from trigger_events; + +create table trigger_variables_log ( + id serial primary key, + trigger_name text, + trigger_when text, + operation text, + table_name text, + table_schema text, + row_id integer, + event_time timestamp default now() +); + +create function trigger_variables_example() +returns trigger +language plpgsql +as $$ +begin + if tg_op = 'INSERT' then + insert into trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, new.id); + elsif tg_op = 'UPDATE' then + insert into trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, new.id); + elsif tg_op = 'DELETE' then + insert into trigger_variables_log(trigger_name, trigger_when, operation, table_name, table_schema, row_id) + values (tg_name, tg_when, tg_op, tg_table_name, tg_table_schema, old.id); + end if; + + return coalesce(new, old); +end; +$$; + +create trigger variables_trigger +after insert or update or delete on employees +for each row +execute function trigger_variables_example(); + +insert into employees (name, salary) values ('Charlie Brown', 45000); +update employees set salary = 47000 where name = 'Charlie Brown'; +delete from employees where name = 'Charlie Brown'; + +select id, trigger_name, trigger_when, operation, table_name, table_schema, row_id from trigger_variables_log; + +create table high_salary_alerts ( + id serial primary key, + employee_name text, + salary numeric, + alert_time timestamp 
default now() +); + +create function conditional_trigger_function() +returns trigger +language plpgsql +as $$ +begin + if new.salary > 100000 then + insert into high_salary_alerts(employee_name, salary) + values (new.name, new.salary); + end if; + return new; +end; +$$; + +create trigger conditional_trigger +after insert or update on employees +for each row +when (new.salary > 100000) +execute function conditional_trigger_function(); + +insert into employees (name, salary) values ('Executive', 150000); +insert into employees (name, salary) values ('Intern', 30000); + +select id, employee_name, salary from high_salary_alerts; + +drop trigger conditional_trigger on employees; +drop trigger variables_trigger on employees; +drop trigger statement_level_trigger on orders; +drop trigger after_delete_trigger on customers; +drop trigger before_insert_trigger on orders; +drop trigger salary_update_trigger on employees; + +drop function conditional_trigger_function(); +drop function trigger_variables_example(); +drop function statement_level_function(); +drop function after_delete_function(); +drop function before_insert_function(); +drop function update_salary_log(); + +drop table high_salary_alerts; +drop table trigger_variables_log; +drop table trigger_events; +drop table customer_audit; +drop table salary_log; +drop table employees; +drop table orders; +drop table customers; diff --git a/nix/tests/sql/docs-webhooks.sql b/nix/tests/sql/docs-webhooks.sql new file mode 100644 index 000000000..56ce68144 --- /dev/null +++ b/nix/tests/sql/docs-webhooks.sql @@ -0,0 +1,262 @@ +-- testing sql found in https://supabase.com/docs/guides/database/webhooks + +create table profiles ( + id serial primary key, + name text, + email text, + created_at timestamp default now() +); + +create table webhook_logs ( + id serial primary key, + webhook_name text, + event_type text, + table_name text, + schema_name text, + record_data jsonb, + old_record_data jsonb, + created_at timestamp default 
now() +); + +create function webhook_handler() +returns trigger +language plpgsql +as $$ +declare + payload jsonb; +begin + if tg_op = 'INSERT' then + payload := jsonb_build_object( + 'type', 'INSERT', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', null + ); + elsif tg_op = 'UPDATE' then + payload := jsonb_build_object( + 'type', 'UPDATE', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ); + elsif tg_op = 'DELETE' then + payload := jsonb_build_object( + 'type', 'DELETE', + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', null, + 'old_record', to_jsonb(old) + ); + end if; + + insert into webhook_logs(webhook_name, event_type, table_name, schema_name, record_data, old_record_data) + values (tg_name, tg_op, tg_table_name, tg_table_schema, payload->'record', payload->'old_record'); + + return coalesce(new, old); +end; +$$; + +create trigger insert_webhook +after insert on profiles +for each row +execute function webhook_handler(); + +create trigger update_webhook +after update on profiles +for each row +execute function webhook_handler(); + +create trigger delete_webhook +after delete on profiles +for each row +execute function webhook_handler(); + +insert into profiles (name, email) values ('John Doe', 'john@example.com'); +insert into profiles (name, email) values ('Jane Smith', 'jane@example.com'); + +select id, webhook_name, event_type, table_name, schema_name, record_data - 'created_at' from webhook_logs where event_type = 'INSERT'; + +update profiles set email = 'john.doe@example.com' where name = 'John Doe'; + +select id, webhook_name, event_type, table_name, schema_name, record_data - 'created_at', old_record_data - 'created_at' from webhook_logs where event_type = 'UPDATE'; + +delete from profiles where name = 'Jane Smith'; + +select id, webhook_name, event_type, table_name, schema_name, old_record_data - 'created_at' from 
webhook_logs where event_type = 'DELETE'; + +create table orders ( + id serial primary key, + customer_id integer, + amount numeric, + status text +); + +create table webhook_requests ( + id serial primary key, + webhook_name text, + url text, + method text, + headers jsonb, + payload jsonb, + response_status integer, + response_data jsonb, + created_at timestamp default now() +); + +create function mock_http_response(url text) +returns table(status integer, body jsonb) +language plpgsql +as $$ +begin + if url like '%dummyjson.com/products/1%' then + return query select 200, '{"id": 1, "title": "Essence Mascara Lash Princess", "price": 9.99, "category": "beauty"}'::jsonb; + elsif url like '%dummyjson.com/products/2%' then + return query select 200, '{"id": 2, "title": "Eyeshadow Palette with Mirror", "price": 19.99, "category": "beauty"}'::jsonb; + else + return query select 404, '{"error": "Product not found"}'::jsonb; + end if; +end; +$$; + +create function http_webhook_handler() +returns trigger +language plpgsql +as $$ +declare + response_code integer; + response_body jsonb; +begin + select status, body into response_code, response_body + from mock_http_response('https://dummyjson.com/products/1'); + + insert into webhook_requests(webhook_name, url, method, headers, payload, response_status, response_data) + values ( + 'orders_webhook', + 'https://dummyjson.com/products/1', + 'GET', + '{"Content-Type": "application/json"}'::jsonb, + jsonb_build_object( + 'type', tg_op, + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ), + response_code, + response_body + ); + + return coalesce(new, old); +end; +$$; + +create trigger http_webhook +after insert or update or delete on orders +for each row +execute function http_webhook_handler(); + +insert into orders (customer_id, amount, status) values (1, 100.50, 'pending'); +insert into orders (customer_id, amount, status) values (2, 250.75, 'completed'); + 
+select id, webhook_name, url, method, response_status, response_data from webhook_requests; + +create table webhook_config ( + id serial primary key, + webhook_name text, + url text, + method text default 'GET', + headers jsonb default '{"Content-Type": "application/json"}'::jsonb, + timeout_ms integer default 1000 +); + +insert into webhook_config (webhook_name, url, method, headers, timeout_ms) +values ('product_webhook', 'https://dummyjson.com/products/2', 'GET', '{"Content-Type": "application/json"}'::jsonb, 5000); + +create function configurable_webhook_handler() +returns trigger +language plpgsql +as $$ +declare + config webhook_config%rowtype; + payload jsonb; + response_code integer; + response_body jsonb; +begin + select * into config from webhook_config where webhook_name = 'product_webhook' limit 1; + + if config.id is null then + raise exception 'Webhook configuration not found'; + end if; + + payload := jsonb_build_object( + 'type', tg_op, + 'table', tg_table_name, + 'schema', tg_table_schema, + 'record', to_jsonb(new), + 'old_record', to_jsonb(old) + ); + + select status, body into response_code, response_body + from mock_http_response(config.url); + + insert into webhook_requests(webhook_name, url, method, headers, payload, response_status, response_data) + values (config.webhook_name, config.url, config.method, config.headers, payload, response_code, response_body); + + if response_code != 200 then + raise exception 'Configurable webhook failed with status: %', response_code; + end if; + + return coalesce(new, old); +end; +$$; + +create trigger product_webhook +after insert on profiles +for each row +execute function configurable_webhook_handler(); + +insert into profiles (name, email) values ('Alice Johnson', 'alice@example.com'); + +select id, webhook_name, url, method, response_status, response_data from webhook_requests where webhook_name = 'product_webhook'; + +create function get_webhook_logs() +returns table ( + webhook_name text, + 
event_type text,
  table_name text,
  record_count bigint
)
language sql
as $$
  select
    webhook_name,
    event_type,
    table_name,
    count(*) as record_count
  from webhook_logs
  group by webhook_name, event_type, table_name
  order by webhook_name, event_type;
$$;

select * from get_webhook_logs();

drop trigger product_webhook on profiles;
drop trigger http_webhook on orders;
drop trigger delete_webhook on profiles;
drop trigger update_webhook on profiles;
drop trigger insert_webhook on profiles;

drop function configurable_webhook_handler();
drop function http_webhook_handler();
drop function mock_http_response(text);
drop function get_webhook_logs();
drop function webhook_handler();

drop table webhook_config;
drop table webhook_requests;
drop table webhook_logs;
drop table orders;
drop table profiles;