From b0306c89bd599f9c815822e45a0c399136358005 Mon Sep 17 00:00:00 2001 From: vcombey Date: Wed, 31 Dec 2025 15:53:30 +0100 Subject: [PATCH 01/13] typed sql poc --- Guide/database.markdown | 8 + Guide/layout.html | 1 + Guide/typed-sql.markdown | 293 +++++++++++++ ihp/IHP/TypedSql.hs | 701 ++++++++++++++++++++++++++++++++ ihp/Test/Test/Main.hs | 24 +- ihp/Test/Test/TypedSqlSpec.hs | 48 +++ ihp/Test/Test/TypedSqlStub.json | 77 ++++ ihp/ihp.cabal | 4 + 8 files changed, 1145 insertions(+), 11 deletions(-) create mode 100644 Guide/typed-sql.markdown create mode 100644 ihp/IHP/TypedSql.hs create mode 100644 ihp/Test/Test/TypedSqlSpec.hs create mode 100644 ihp/Test/Test/TypedSqlStub.json diff --git a/Guide/database.markdown b/Guide/database.markdown index f12ed6ebc..029af2473 100644 --- a/Guide/database.markdown +++ b/Guide/database.markdown @@ -96,6 +96,14 @@ When dumping the database into the `Fixtures.sql` first and then rebuilding the To have the full database dumped in a portable manner, you can do `make sql_dump > /tmp/my_app.sql`, which will generate a full SQL database dump, without owner or ACL information. +## Typed SQL + +When Query Builder is not expressive enough and `sqlQuery` feels too loose, +use `typedSql` for compile-time checked SQL with IHP type inference. It uses +your schema to return `Id` and generated record types automatically. + +See [Typed SQL](https://ihp.digitallyinduced.com/Guide/typed-sql.html). + ## Haskell Bindings ### Model Context diff --git a/Guide/layout.html b/Guide/layout.html index 32a38322e..dd6f0e5bc 100644 --- a/Guide/layout.html +++ b/Guide/layout.html @@ -88,6 +88,7 @@ Basics Relationships Query Builder + Typed SQL Migrations diff --git a/Guide/typed-sql.markdown b/Guide/typed-sql.markdown new file mode 100644 index 000000000..da04f0aa9 --- /dev/null +++ b/Guide/typed-sql.markdown @@ -0,0 +1,293 @@ +# Typed SQL (typedSql) + +`typedSql` gives you compile-time checked SQL that integrates with IHP +models. 
It is designed for cases where Query Builder is too limited and +`sqlQuery` is too loose. The goal is to let you keep raw SQL while still +getting: + +- compile-time validation (syntax and type checks) +- automatic mapping to IHP's generated types +- column name and nullability handling consistent with IHP + +This page explains how it works, how to use it, and how to debug it. + +## When to use typedSql + +Use `typedSql` when: + +- you need complex joins, CTEs, window functions, or vendor-specific SQL +- you want a typed result without writing `FromRow` manually +- you want compile-time SQL checking against your schema + +Keep using Query Builder for simple filtering and `sqlQuery` for truly +runtime-generated SQL. + +## Quick start + +Enable Template Haskell and QuasiQuotes in a module where you want to use +`typedSql`: + +```haskell +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE DataKinds #-} +``` + +Import the module and run the query using `runTyped`: + +```haskell +module Web.Controller.Users where + +import IHP.ControllerPrelude +import IHP.TypedSql + +indexAction :: ?modelContext => IO () +indexAction = do + let userId = "00000000-0000-0000-0000-000000000001" :: Id User + users <- runTyped [typedSql| + SELECT users.id, users.name + FROM users + WHERE users.id = ${userId} + |] + + render Json { users } +``` + +`typedSql` produces a `TypedQuery` value that is executed using +`runTyped` or `runTypedOne`. + +## Result type inference + +### Full table row + +If the query returns all columns of a table in the exact order defined in +the schema (e.g. 
`SELECT users.*`), then the result is the generated +record type: + +```haskell +user :: User <- runTypedOne [typedSql| + SELECT users.* FROM users WHERE users.id = ${userId} +|] +``` + +### Partial selection + +When you return a subset of columns, the result is a tuple: + +```haskell +userInfo :: [(Id User, Text)] <- runTyped [typedSql| + SELECT users.id, users.name FROM users +|] +``` + +### Foreign keys + +If a column is a single-column foreign key, `typedSql` maps it to +`Id' "other_table"` automatically: + +```haskell +authorIds :: [Maybe (Id User)] <- runTyped [typedSql| + SELECT posts.author_id FROM posts WHERE posts.slug = ${slug} +|] +``` + +This follows IHP's usual `Id` mapping rules. + +## Parameter placeholders + +`typedSql` uses `${expr}` placeholders. Each placeholder becomes a `$N` +parameter and is type-checked against the database. + +```haskell +runTyped [typedSql| + SELECT * FROM posts WHERE posts.id = ${postId} +|] +``` + +Notes: + +- Do not use `?` or `$1` placeholders directly. +- Parameter types come from OIDs only, so UUID parameters are `UUID` (not + `Id'`). Use `get #id record` or `unId` if you want to pass an `Id'`. +- Use explicit type annotations for ambiguous values: + +```haskell +runTyped [typedSql| + SELECT * FROM posts WHERE posts.score > ${10 :: Int} +|] +``` + +- For arrays, prefer `= ANY(${ids})` rather than `IN (${ids})`. + +## Nullability rules + +`typedSql` tries to infer nullability from `pg_attribute` when a column is +traceable to a table. If a column comes from an expression or a `LEFT +JOIN`, the result is treated as nullable (`Maybe`) by default. + +If you want to force a non-null result, use SQL functions such as +`COALESCE`: + +```haskell +runTyped [typedSql| + SELECT COALESCE(posts.title, '') FROM posts +|] +``` + +## Type mapping + +The mapping follows IHP's conventions. 
Summary of common types: + +- `uuid` -> `UUID` (result columns from PK/FK map to `Id' "table"`) +- `text`, `varchar`, `bpchar`, `citext` -> `Text` +- `int2`, `int4` -> `Int` +- `int8` -> `Integer` +- `bool` -> `Bool` +- `timestamptz` -> `UTCTime` +- `timestamp` -> `LocalTime` +- `date` -> `Day` +- `time` -> `TimeOfDay` +- `json`, `jsonb` -> `Aeson.Value` +- `bytea` -> `Binary ByteString` +- `numeric` -> `Scientific` +- `interval` -> `PGInterval` +- `point` -> `Point` +- `polygon` -> `Polygon` +- `inet` -> `IP` +- `tsvector` -> `TSVector` +- enums -> `Generated.Enums.` +- composite types -> `Generated.ActualTypes.` + +If you have custom types, add a `FromField` instance and extend +`hsTypeForPg` in `IHP.TypedSql`. + +## Runtime behavior + +`runTyped` uses IHP's `ModelContext`, so it automatically: + +- uses the pooled connection +- respects row-level security (RLS) +- logs queries in the same format as `sqlQuery` + +There is no separate runtime connection layer. + +## Compile-time database access + +`typedSql` talks to your database at compile time. It uses +`DATABASE_URL` or the same default used by IHP (`build/db`). Make sure +the database is running and the schema is up to date when compiling. + +If the schema changes, recompile so the query description is refreshed. + +## Tests without a database: stub mode + +For tests, you can skip the live DB by providing a JSON stub file. This +lets you run `typedSql` in CI without a running Postgres. + +Set the environment variable before splicing any `typedSql`: + +```haskell +{-# LANGUAGE TemplateHaskell #-} + +import Language.Haskell.TH.Syntax (runIO) +import System.Environment (setEnv) + +$(do + runIO (setEnv "IHP_TYPED_SQL_STUB" "Test/Test/TypedSqlStub.json") + pure [] + ) +``` + +### Stub file format + +A stub file is a JSON document with a list of query entries. 
Each entry +contains: + +- the SQL string (after placeholder substitution) +- parameter OIDs +- column OIDs and table metadata +- table metadata (columns, PKs, FKs) +- type metadata (names, element OIDs, type category) + +Example: + +```json +{ + "queries": [ + { + "sql": "SELECT users.id, users.name FROM users WHERE users.id = $1", + "params": [2950], + "columns": [ + {"name": "id", "typeOid": 2950, "tableOid": 100000, "attnum": 1}, + {"name": "name", "typeOid": 25, "tableOid": 100000, "attnum": 2} + ], + "tables": [ + { + "oid": 100000, + "name": "users", + "columns": [ + {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, + {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true} + ], + "primaryKeys": [1], + "foreignKeys": [] + } + ], + "types": [ + {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b"}, + {"oid": 25, "name": "text", "elemOid": null, "typtype": "b"} + ] + } + ] +} +``` + +Only the queries your tests use need to be present in the stub file. + +## Limitations and gotchas + +- Only `${expr}` placeholders are supported. +- Queries with untracked parameters (e.g. `$1` without `${}`) will fail. +- Multi-column foreign keys are not mapped to `Id` yet. +- Nullability for computed columns defaults to `Maybe`. +- Compile-time checks require a schema that matches the runtime schema. + +## Migration guidance + +If you currently use `sqlQuery` for complex queries: + +1. Wrap the query in `[typedSql| ... |]`. +2. Replace `?` placeholders with `${expr}`. +3. Replace custom `FromRow` with inferred tuples or records. +4. Use `runTyped` instead of `sqlQuery`. + +You get compile-time SQL validation with minimal changes. + +## Troubleshooting + +**Error: could not connect to database** + +- Ensure `DATABASE_URL` is set and reachable during compilation. +- Or set `IHP_TYPED_SQL_STUB` to use stub mode. + +**Error: placeholder count mismatch** + +- Check that every parameter is written as `${expr}`. 
+ +**Unexpected `Maybe` results** + +- The column is nullable or computed. Use `COALESCE` or accept `Maybe`. + +**Unknown type errors** + +- Add an explicit type cast in SQL or add a mapping in `IHP.TypedSql`. + +## API summary + +```haskell +typedSql :: QuasiQuoter +runTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] +runTypedOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result +``` + +See `IHP.TypedSql` for the full implementation. diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs new file mode 100644 index 000000000..7c164624c --- /dev/null +++ b/ihp/IHP/TypedSql.hs @@ -0,0 +1,701 @@ +{-# LANGUAGE ImplicitParams #-} -- allow ?modelContext implicit parameter +{-# LANGUAGE NamedFieldPuns #-} -- allow record field puns like {field} +{-# LANGUAGE QuasiQuotes #-} -- enable [typedSql|...|] quasiquoter syntax +{-# LANGUAGE RecordWildCards #-} -- allow {..} to bind all record fields +{-# LANGUAGE TemplateHaskell #-} -- allow compile-time code generation via TH + +-- Module implementing the typed SQL quasiquoter and its supporting helpers. +module IHP.TypedSql + ( typedSql -- expose the SQL quasiquoter entry point + , TypedQuery (..) 
-- expose the query container and its fields + , runTyped -- execute a typed query returning all rows + , runTypedOne -- execute a typed query expecting exactly one row + ) where + +import IHP.Prelude -- IHP's standard prelude with common utilities +import Control.Monad (guard) -- guard for Maybe-based checks +import qualified Data.Aeson as Aeson -- JSON parsing for stub metadata +import Data.Aeson ((.:), (.:?), (.!=)) -- JSON field operators used in FromJSON instances +import qualified Data.ByteString as BS -- byte string for raw SQL and file IO +import qualified Data.List as List -- list helpers like groupBy +import Data.Maybe (mapMaybe) -- mapMaybe for grouping columns safely +import qualified Data.Map.Strict as Map -- strict map used for metadata lookup +import qualified Data.Set as Set -- set used for OID collection +import qualified Data.String.Conversions as CS -- convert between Text/String/ByteString +import Data.IORef (IORef, atomicModifyIORef', newIORef, readIORef) -- mutable cache for stubs +import Data.Scientific (Scientific) -- numeric type for Postgres numeric +import Data.Time (LocalTime, TimeOfDay, UTCTime) -- time types for mapping +import Data.Time.Calendar (Day) -- date-only type +import Data.UUID (UUID) -- UUID mapping +import Data.Word (Word32) -- raw OID storage in stub files +import qualified Database.PostgreSQL.LibPQ as PQ -- low-level libpq for describe/prepare +import qualified Database.PostgreSQL.Simple as PG -- high-level postgres-simple API +import qualified Database.PostgreSQL.Simple.FromRow as PGFR -- row parser typeclass +import qualified Database.PostgreSQL.Simple.ToField as PGTF -- parameter encoding +import qualified Database.PostgreSQL.Simple.ToRow as PGTR -- ToRow instance for PreparedRow +import qualified Database.PostgreSQL.Simple.Types as PG -- extra postgres-simple types (Binary, Oid, etc.) 
+import qualified Language.Haskell.Meta.Parse as HaskellMeta -- parse placeholder expressions as Haskell AST +import qualified Language.Haskell.TH as TH -- Template Haskell core types +import qualified Language.Haskell.TH.Quote as TH -- QuasiQuoter type +import Net.IP (IP) -- inet type mapping + +import IHP.FrameworkConfig (defaultDatabaseUrl) -- resolve DB URL for describe +import IHP.ModelSupport (Id', ModelContext, measureTimeIfLogging, withDatabaseConnection, withRLSParams) -- query helpers and Id type +import IHP.NameSupport (tableNameToModelName) -- convert SQL table names to Haskell type names +import qualified IHP.Postgres.Point as PGPoint -- Postgres point mapping +import qualified IHP.Postgres.Polygon as PGPolygon -- Postgres polygon mapping +import qualified IHP.Postgres.TSVector as PGTs -- tsvector mapping +import qualified IHP.Postgres.TimeParser as PGTime -- interval mapping +import System.Environment (lookupEnv) -- read stub path env var +import System.IO.Unsafe (unsafePerformIO) -- top-level cache with IORef + +-- | Prepared query with a custom row parser. +-- High-level: this is the runtime value produced by the typed SQL quasiquoter. +data TypedQuery result = TypedQuery + { tqQuery :: !PG.Query -- low-level: SQL text packaged for postgres-simple + , tqParams :: ![PGTF.Action] -- low-level: encoded parameter values for the query + , tqRowParser :: !(PGFR.RowParser result) -- low-level: how to decode each row into result + } + +-- Wrapper to turn a list of Action into a ToRow instance. +newtype PreparedRow = PreparedRow [PGTF.Action] -- low-level: container to satisfy ToRow +instance PGTR.ToRow PreparedRow where + toRow (PreparedRow params) = params -- low-level: pass through the parameter list + +-- | Run a typed query and return all rows. +-- High-level: delegates to postgres-simple with logging and RLS params. 
+runTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] +runTyped TypedQuery { tqQuery, tqParams, tqRowParser } = + withDatabaseConnection \connection -> -- obtain a DB connection from the model context + withRLSParams -- apply row-level security parameters if configured + (\query params -> + measureTimeIfLogging "🔍" connection -- measure/log query runtime with a label + (PG.queryWith tqRowParser connection query (PreparedRow params)) -- execute and parse rows + query -- log the SQL query + (PreparedRow params) -- log the query parameters + ) + tqQuery -- the SQL to execute + (PreparedRow tqParams) -- wrap params to match ToRow + +-- | Run a typed query that is expected to return a single row. +-- High-level: enforces exactly-one-row semantics. +runTypedOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result +runTypedOne typed = do + rows <- runTyped typed -- execute query and collect rows + case rows of + [row] -> pure row -- success: exactly one row + [] -> error "runTypedOne: expected exactly one row but got none" -- error on zero rows + _ -> error ("runTypedOne: expected a single row but got " <> show (length rows)) -- error on too many rows + +-- * Template Haskell quasiquoter + +-- | QuasiQuoter entry point for typed SQL. +-- High-level: produces a TH expression that builds a TypedQuery at compile time. +typedSql :: TH.QuasiQuoter +typedSql = + TH.QuasiQuoter + { TH.quoteExp = typedSqlExp -- allow use in expressions + , TH.quotePat = \_ -> fail "typedSql: not supported in patterns" -- disallow pattern contexts + , TH.quoteType = \_ -> fail "typedSql: not supported in types" -- disallow type contexts + , TH.quoteDec = \_ -> fail "typedSql: not supported at top-level" -- disallow declaration contexts + } + +-- | Result of describing a statement. +-- High-level: captures everything we need to infer Haskell types. 
+data DescribeResult = DescribeResult + { drParams :: ![PQ.Oid] -- low-level: parameter OIDs in order + , drColumns :: ![DescribeColumn] -- low-level: column metadata in order + , drTables :: !(Map.Map PQ.Oid TableMeta) -- low-level: table metadata by OID + , drTypes :: !(Map.Map PQ.Oid PgTypeInfo) -- low-level: type metadata by OID + } + +-- Metadata for a column in the result set. +data DescribeColumn = DescribeColumn + { dcName :: !BS.ByteString -- low-level: column name as bytes + , dcType :: !PQ.Oid -- low-level: column type OID + , dcTable :: !PQ.Oid -- low-level: originating table OID (0 if none) + , dcAttnum :: !(Maybe Int) -- low-level: attribute number inside table, if known + } + +-- Column details extracted from pg_attribute. +data ColumnMeta = ColumnMeta + { cmAttnum :: !Int -- low-level: attribute number within table + , cmName :: !Text -- low-level: column name + , cmTypeOid :: !PQ.Oid -- low-level: type OID for this column + , cmNotNull :: !Bool -- low-level: whether the column is NOT NULL + } + +-- Table metadata, including columns and key relationships. +data TableMeta = TableMeta + { tmOid :: !PQ.Oid -- low-level: table OID + , tmName :: !Text -- low-level: table name + , tmColumns :: !(Map.Map Int ColumnMeta) -- low-level: columns keyed by attnum + , tmColumnOrder :: ![Int] -- low-level: column order as defined in table + , tmPrimaryKeys :: !(Set.Set Int) -- low-level: attribute numbers of primary keys + , tmForeignKeys :: !(Map.Map Int PQ.Oid) -- low-level: attnum -> referenced table oid + } + +-- Postgres type metadata we need for Haskell mapping. +data PgTypeInfo = PgTypeInfo + { ptiOid :: !PQ.Oid -- low-level: type OID + , ptiName :: !Text -- low-level: type name (typname) + , ptiElem :: !(Maybe PQ.Oid) -- low-level: element type for arrays + , ptiType :: !(Maybe Char) -- low-level: typtype code (e.g. 'e' for enum) + , ptiNamespace :: !(Maybe Text) -- low-level: namespace name + } + +-- Convert libpq Oid to postgres-simple Oid. 
+toPGOid :: PQ.Oid -> PG.Oid +toPGOid (PQ.Oid w) = PG.Oid w -- low-level: wrap the same numeric value + +-- Convert postgres-simple Oid to libpq Oid. +toPQOid :: PG.Oid -> PQ.Oid +toPQOid (PG.Oid w) = PQ.Oid w -- low-level: wrap the same numeric value + +-- Build the TH expression for a typed SQL quasiquote. +typedSqlExp :: String -> TH.ExpQ +typedSqlExp rawSql = do + -- Replace ${expr} placeholders with $1, $2, ... and collect expressions. + let (sqlText, placeholderExprs) = substitutePlaceholders rawSql + parsedExprs <- mapM parseExpr placeholderExprs -- parse each placeholder as Haskell code + + stubPath <- TH.runIO (lookupEnv "IHP_TYPED_SQL_STUB") -- optional path to a stub file + describeResult <- TH.runIO $ case stubPath of + Just path -> describeUsingStub path sqlText -- offline: use stub metadata + Nothing -> describeStatement (CS.cs sqlText) -- online: ask Postgres to describe + + let DescribeResult { drParams, drColumns, drTables, drTypes } = describeResult -- unpack metadata + when (length drParams /= length parsedExprs) $ -- make sure counts match + fail (CS.cs ("typedSql: placeholder count mismatch. 
SQL expects " <> show (length drParams) <> " parameters but found " <> show (length parsedExprs) <> " ${..} expressions.")) + + paramTypes <- mapM (hsTypeForParam drTypes) drParams -- map param OIDs to Haskell types + + let annotatedParams = + zipWith (\expr paramTy -> TH.SigE expr paramTy) parsedExprs paramTypes -- add type sigs to args + + resultType <- hsTypeForColumns drTypes drTables drColumns -- compute result type from columns + + let sqlLiteral = TH.SigE (TH.LitE (TH.StringL sqlText)) (TH.ConT ''String) + queryExpr = TH.AppE (TH.ConE 'PG.Query) (TH.AppE (TH.VarE 'CS.cs) sqlLiteral) + rowParserExpr = if length drColumns == 1 then TH.VarE 'PGFR.field else TH.VarE 'PGFR.fromRow + typedQueryExpr = + TH.RecConE + 'TypedQuery + [ (TH.mkName "tqQuery", queryExpr) -- build query text + , (TH.mkName "tqParams", TH.ListE (map (TH.AppE (TH.VarE 'PGTF.toField)) annotatedParams)) -- encode params + , (TH.mkName "tqRowParser", rowParserExpr) -- parse single column or full row + ] + + pure (TH.SigE typedQueryExpr (TH.AppT (TH.ConT ''TypedQuery) resultType)) -- add overall type signature + +-- | Replace ${expr} placeholders with PostgreSQL-style $1 placeholders. +-- High-level: turns a templated SQL string into a PG-ready SQL string plus expr list. 
+substitutePlaceholders :: String -> (String, [String]) +substitutePlaceholders = go 1 "" [] where + go _ acc exprs [] = (reverse acc, reverse exprs) -- done: reverse accumulators + go n acc exprs ('$':'{':rest) = + let (expr, after) = breakOnClosing 0 "" rest -- parse until matching } + in go (n + 1) (reverse ('$' : CS.cs (show n)) <> acc) (expr : exprs) after -- replace with $n + go n acc exprs (c:rest) = go n (c : acc) exprs rest -- copy non-placeholder chars + + breakOnClosing depth acc [] = (reverse acc, []) -- no closing brace found + breakOnClosing depth acc ('{':xs) = breakOnClosing (depth + 1) ('{':acc) xs -- nested { increases depth + breakOnClosing depth acc ('}':xs) + | depth == 0 = (reverse acc, xs) -- close the current placeholder + | otherwise = breakOnClosing (depth - 1) ('}':acc) xs -- close a nested brace + breakOnClosing depth acc (x:xs) = breakOnClosing depth (x:acc) xs -- accumulate placeholder chars + +-- Parse a placeholder expression into TH. +parseExpr :: String -> TH.ExpQ +parseExpr exprText = + case HaskellMeta.parseExp exprText of + Left err -> fail ("typedSql: failed to parse expression {" <> exprText <> "}: " <> err) -- parse error + Right expr -> pure expr -- success: return parsed TH expression + +-- | Describe a statement using libpq, and fetch the additional metadata needed to map to Haskell types. +-- High-level: loads a DescribeResult from a stub JSON file. +describeUsingStub :: FilePath -> String -> IO DescribeResult +describeUsingStub path sqlText = do + entries <- loadStubEntries path -- load and cache stub entries + let key = CS.cs sqlText -- use the SQL text as the lookup key + maybe (fail ("typedSql: no stub entry for SQL: " <> sqlText)) pure (Map.lookup key entries) -- lookup or fail + +-- Describe a statement by asking a real Postgres server. 
+describeStatement :: BS.ByteString -> IO DescribeResult +describeStatement sql = do + dbUrl <- defaultDatabaseUrl -- read database URL + conn <- PQ.connectdb dbUrl -- open libpq connection + status <- PQ.status conn -- check connection state + unless (status == PQ.ConnectionOk) do + err <- PQ.errorMessage conn -- fetch error message + fail ("typedSql: could not connect to database: " <> CS.cs (fromMaybe "" err)) -- abort compile + + let statementName = "ihp_typed_sql_stmt" -- use a fixed prepared statement name + _ <- ensureOk "prepare" =<< PQ.prepare conn statementName sql Nothing -- prepare the statement + desc <- ensureOk "describe" =<< PQ.describePrepared conn statementName -- ask for metadata + + paramCount <- PQ.nparams desc -- number of parameters + paramTypes <- mapM (PQ.paramtype desc) [0 .. paramCount - 1] -- each parameter type OID + + columnCount <- PQ.nfields desc -- number of result columns + let PQ.Col columnCountCInt = columnCount + let columnCountInt = fromIntegral columnCountCInt :: Int + columns <- mapM (\i -> do + let colIndex = PQ.Col (fromIntegral i) + name <- fromMaybe "" <$> PQ.fname desc colIndex -- column name + colType <- PQ.ftype desc colIndex -- column type OID + tableOid <- PQ.ftable desc colIndex -- table OID for the column + attnumRaw <- PQ.ftablecol desc colIndex -- attribute number in table + let PQ.Col attnumCInt = attnumRaw + let attnum = if tableOid == PQ.Oid 0 then Nothing else Just (fromIntegral attnumCInt) -- ignore attnum when table is 0 + pure DescribeColumn { dcName = name, dcType = colType, dcTable = tableOid, dcAttnum = attnum } -- build column meta + ) [0 .. columnCountInt - 1] + + -- Load metadata for referenced tables and types. 
+ let tableOids = Set.fromList (map dcTable columns) |> Set.delete (PQ.Oid 0) -- collect referenced table OIDs + typeOids = Set.fromList paramTypes <> Set.fromList (map dcType columns) -- collect referenced type OIDs + + pgConn <- PG.connectPostgreSQL dbUrl -- open postgres-simple connection for catalog queries + tables <- loadTableMeta pgConn (Set.toList tableOids) -- load table metadata + types <- loadTypeInfo pgConn (Set.toList typeOids) -- load type metadata + PG.close pgConn -- close postgres-simple connection + + _ <- PQ.exec conn ("DEALLOCATE " <> statementName) -- release prepared statement + PQ.finish conn -- close libpq connection + + pure DescribeResult { drParams = paramTypes, drColumns = columns, drTables = tables, drTypes = types } -- return full metadata + +-- Ensure libpq returned a successful result. +ensureOk :: String -> Maybe PQ.Result -> IO PQ.Result +ensureOk actionName = \case + Nothing -> fail ("typedSql: " <> actionName <> " returned no result") -- missing libpq result + Just res -> do + status <- PQ.resultStatus res -- inspect result status + case status of + PQ.CommandOk -> pure res -- OK for command + PQ.TuplesOk -> pure res -- OK for tuples + _ -> do + msg <- PQ.resultErrorMessage res -- read error message + fail ("typedSql: " <> actionName <> " failed: " <> CS.cs (fromMaybe "" msg)) -- abort + +-- | Load table metadata for all referenced tables. +-- High-level: read pg_catalog to map table/column info. +loadTableMeta :: PG.Connection -> [PQ.Oid] -> IO (Map.Map PQ.Oid TableMeta) +loadTableMeta _ [] = pure mempty -- no tables requested +loadTableMeta conn tableOids = do + rows <- PG.query conn -- fetch column info for each requested table + (mconcat + [ "SELECT c.oid, c.relname, a.attnum, a.attname, a.atttypid, a.attnotnull " + , "FROM pg_class c " + , "JOIN pg_namespace ns ON ns.oid = c.relnamespace " + , "JOIN pg_attribute a ON a.attrelid = c.oid " + , "WHERE c.oid = ANY(?) 
AND a.attnum > 0 AND NOT a.attisdropped " + , "ORDER BY c.oid, a.attnum" + ]) + (PG.Only (PG.PGArray (map toPGOid tableOids)) :: PG.Only (PG.PGArray PG.Oid)) -- parameterize the OID list + + primaryKeys <- PG.query conn -- fetch primary key columns for each table + "SELECT conrelid, unnest(conkey) as attnum FROM pg_constraint WHERE contype = 'p' AND conrelid = ANY(?)" + (PG.Only (PG.PGArray (map toPGOid tableOids)) :: PG.Only (PG.PGArray PG.Oid)) -- parameterize the OID list + + foreignKeys <- PG.query conn -- fetch simple (single-column) foreign keys + (mconcat + [ "SELECT conrelid, conkey[1] as attnum, confrelid " + , "FROM pg_constraint " + , "WHERE contype = 'f' AND array_length(conkey,1) = 1 AND conrelid = ANY(?)" + ]) + (PG.Only (PG.PGArray (map toPGOid tableOids)) :: PG.Only (PG.PGArray PG.Oid)) -- parameterize the OID list + + let pkMap = primaryKeys + |> foldl' (\acc (relid :: PG.Oid, att :: Int) -> + Map.insertWith Set.union (toPQOid relid) (Set.singleton att) acc + ) mempty -- build map of table -> primary key attnums + + fkMap = foreignKeys + |> foldl' (\acc (relid :: PG.Oid, att :: Int, ref :: PG.Oid) -> + Map.insertWith Map.union (toPQOid relid) (Map.singleton att (toPQOid ref)) acc + ) mempty -- build map of table -> foreign key attnum -> referenced table + + tableGroups = + rows + |> map (\(relid :: PG.Oid, name :: Text, attnum :: Int, attname :: Text, atttypid :: PG.Oid, attnotnull :: Bool) -> + (toPQOid relid, ColumnMeta { cmAttnum = attnum, cmName = attname, cmTypeOid = toPQOid atttypid, cmNotNull = attnotnull }, name) + ) -- annotate each column row with its table + |> List.groupBy (\(l, _, _) (r, _, _) -> l == r) -- group by table OID + + pure $ tableGroups + |> foldl' + (\acc group -> + case group of + [] -> acc -- no columns, keep accumulator + (relid, _, tableName) : _ -> + let cols = group + |> map (\(_, col, _) -> (cmAttnum col, col)) + |> Map.fromList -- map attnum -> ColumnMeta + order = group |> map (\(_, ColumnMeta { cmAttnum }, _) -> 
cmAttnum) -- preserve column order + pks = Map.findWithDefault mempty relid pkMap -- lookup primary keys + fks = Map.findWithDefault mempty relid fkMap -- lookup foreign keys + meta = TableMeta + { tmOid = relid + , tmName = tableName + , tmColumns = cols + , tmColumnOrder = order + , tmPrimaryKeys = pks + , tmForeignKeys = fks + } -- build TableMeta + in Map.insert relid meta acc -- add to result map + ) + mempty -- start with empty map + +-- | Load type information for the given OIDs. +-- High-level: fetch pg_type metadata recursively for arrays. +loadTypeInfo :: PG.Connection -> [PQ.Oid] -> IO (Map.Map PQ.Oid PgTypeInfo) +loadTypeInfo _ [] = pure mempty -- no types requested +loadTypeInfo conn typeOids = do + let requested = Set.fromList typeOids -- track requested OIDs + rows <- PG.query conn -- fetch pg_type rows for requested OIDs + (mconcat + [ "SELECT oid, typname, typelem, typtype, typnamespace::regnamespace::text " + , "FROM pg_type " + , "WHERE oid = ANY(?)" + ]) + (PG.Only (PG.PGArray (map toPGOid typeOids)) :: PG.Only (PG.PGArray PG.Oid)) -- parameterize the OID list + let (typeMap, missing) = + rows + |> foldl' + (\(acc, missingAcc) (oid :: PG.Oid, name :: Text, elemOid :: PG.Oid, typtype :: Maybe Text, nsp :: Maybe Text) -> + let thisOid = toPQOid oid -- convert to libpq Oid type + elemOid' = if elemOid == PG.Oid 0 then Nothing else Just (toPQOid elemOid) -- ignore 0 elem + nextMissing = case elemOid' of + Just o | o `Set.notMember` requested -> o : missingAcc -- queue missing element types + _ -> missingAcc + in ( Map.insert thisOid PgTypeInfo + { ptiOid = thisOid + , ptiName = name + , ptiElem = elemOid' + , ptiType = typtype >>= (listToMaybe . 
CS.cs) -- extract single char typtype + , ptiNamespace = nsp + } + acc + , nextMissing + ) + ) + (mempty, []) -- start with empty map and missing list + extras <- loadTypeInfo conn missing -- recursively load missing element types + pure (typeMap <> extras) -- merge base and extra type info + +-- Stub metadata ------------------------------------------------------- + +-- Cache mapping stub file path to parsed SQL metadata. +type StubCache = Map.Map FilePath (Map.Map Text DescribeResult) + +{-# NOINLINE describeStubCache #-} -- ensure the IORef is shared and not duplicated +-- Global cache for stub metadata (safe because file contents are immutable). +describeStubCache :: IORef StubCache +describeStubCache = unsafePerformIO (newIORef mempty) -- initialize to empty + +-- Load stub entries from cache or parse if missing. +loadStubEntries :: FilePath -> IO (Map.Map Text DescribeResult) +loadStubEntries path = do + cache <- readIORef describeStubCache -- read current cache + case Map.lookup path cache of + Just entries -> pure entries -- cache hit + Nothing -> do + entries <- parseStubFile path -- parse the stub file + atomicModifyIORef' describeStubCache (\m -> (Map.insert path entries m, ())) -- store in cache + pure entries + +-- Parse a stub file on disk into DescribeResult values. +parseStubFile :: FilePath -> IO (Map.Map Text DescribeResult) +parseStubFile path = do + bytes <- BS.readFile path -- read JSON file + stubFile <- either (fail . ("typedSql: failed to parse stub file: " <>) ) pure (Aeson.eitherDecodeStrict' bytes :: Either String DescribeStubFile) -- decode JSON + pure (buildStubEntries stubFile) -- build lookup map + +-- Convert a DescribeStubFile into a map keyed by SQL text. 
+buildStubEntries :: DescribeStubFile -> Map.Map Text DescribeResult +buildStubEntries DescribeStubFile { stubFileQueries } = + foldl' + (\acc entry -> + Map.insert (stubEntrySql entry) (stubEntryToDescribe entry) acc + ) + mempty + stubFileQueries + +-- Top-level JSON structure for stub metadata. +data DescribeStubFile = DescribeStubFile + { stubFileQueries :: ![DescribeStubEntry] -- list of stubbed statements + } + +instance Aeson.FromJSON DescribeStubFile where + parseJSON = Aeson.withObject "DescribeStubFile" \obj -> + DescribeStubFile <$> obj .:? "queries" .!= [] -- default to empty list + +-- One stubbed SQL statement entry. +data DescribeStubEntry = DescribeStubEntry + { stubEntrySql :: !Text -- SQL text used as lookup key + , stubEntryParams :: ![Word32] -- parameter type OIDs + , stubEntryColumns :: ![DescribeStubColumn] -- result columns + , stubEntryTables :: ![DescribeStubTable] -- table metadata + , stubEntryTypes :: ![DescribeStubType] -- type metadata + } + +instance Aeson.FromJSON DescribeStubEntry where + parseJSON = Aeson.withObject "DescribeStubEntry" \obj -> + DescribeStubEntry + <$> obj .: "sql" -- required SQL string + <*> obj .:? "params" .!= [] -- optional params list + <*> obj .:? "columns" .!= [] -- optional columns list + <*> obj .:? "tables" .!= [] -- optional tables list + <*> obj .:? "types" .!= [] -- optional types list + +-- Column description inside a stub entry. +data DescribeStubColumn = DescribeStubColumn + { stubColumnName :: !Text -- column name + , stubColumnType :: !Word32 -- type OID + , stubColumnTable :: !Word32 -- table OID (0 if none) + , stubColumnAttnum :: !(Maybe Int) -- attribute number, if known + } + +instance Aeson.FromJSON DescribeStubColumn where + parseJSON = Aeson.withObject "DescribeStubColumn" \obj -> + DescribeStubColumn + <$> obj .: "name" -- required name + <*> obj .: "typeOid" -- required type OID + <*> obj .:? "tableOid" .!= 0 -- default to 0 (no table) + <*> obj .:? 
"attnum" -- optional attnum + +-- Table description inside a stub entry. +data DescribeStubTable = DescribeStubTable + { stubTableOid :: !Word32 -- table OID + , stubTableName :: !Text -- table name + , stubTableColumns :: ![DescribeStubTableColumn] -- table columns + , stubTablePrimaryKeys :: ![Int] -- primary key attnums + , stubTableForeignKeys :: ![DescribeStubForeignKey] -- foreign keys + } + +instance Aeson.FromJSON DescribeStubTable where + parseJSON = Aeson.withObject "DescribeStubTable" \obj -> + DescribeStubTable + <$> obj .: "oid" -- required OID + <*> obj .: "name" -- required table name + <*> obj .:? "columns" .!= [] -- optional columns list + <*> obj .:? "primaryKeys" .!= [] -- optional PK list + <*> obj .:? "foreignKeys" .!= [] -- optional FK list + +-- Column description inside a stubbed table. +data DescribeStubTableColumn = DescribeStubTableColumn + { stubTableColumnAttnum :: !Int -- attribute number in table + , stubTableColumnName :: !Text -- column name + , stubTableColumnType :: !Word32 -- type OID + , stubTableColumnNotNull :: !Bool -- NOT NULL flag + } + +instance Aeson.FromJSON DescribeStubTableColumn where + parseJSON = Aeson.withObject "DescribeStubTableColumn" \obj -> + DescribeStubTableColumn + <$> obj .: "attnum" -- required attribute number + <*> obj .: "name" -- required column name + <*> obj .: "typeOid" -- required type OID + <*> obj .:? "notNull" .!= False -- default false for nullable + +-- Foreign key description inside a stubbed table. +data DescribeStubForeignKey = DescribeStubForeignKey + { stubForeignKeyAttnum :: !Int -- local column attnum + , stubForeignKeyReferences :: !Word32 -- referenced table OID + } + +instance Aeson.FromJSON DescribeStubForeignKey where + parseJSON = Aeson.withObject "DescribeStubForeignKey" \obj -> + DescribeStubForeignKey + <$> obj .: "attnum" -- required local attnum + <*> obj .: "references" -- required referenced table OID + +-- Type description inside a stub entry. 
+data DescribeStubType = DescribeStubType + { stubTypeOid :: !Word32 -- type OID + , stubTypeName :: !Text -- type name + , stubTypeElemOid :: !(Maybe Word32) -- element type OID for arrays + , stubTypeTyptype :: !(Maybe Text) -- typtype as text + , stubTypeNamespace :: !(Maybe Text) -- namespace name + } + +instance Aeson.FromJSON DescribeStubType where + parseJSON = Aeson.withObject "DescribeStubType" \obj -> + DescribeStubType + <$> obj .: "oid" -- required OID + <*> obj .: "name" -- required type name + <*> obj .:? "elemOid" -- optional element OID + <*> obj .:? "typtype" -- optional typtype + <*> obj .:? "namespace" -- optional namespace + +-- Convert a stub entry into a DescribeResult. +stubEntryToDescribe :: DescribeStubEntry -> DescribeResult +stubEntryToDescribe DescribeStubEntry { .. } = + DescribeResult + { drParams = map oidFromWord stubEntryParams -- convert param OIDs + , drColumns = map stubColumnToDescribe stubEntryColumns -- convert columns + , drTables = Map.fromList (map stubTableToDescribe stubEntryTables) -- convert tables + , drTypes = Map.fromList (map stubTypeToDescribe stubEntryTypes) -- convert types + } + +-- Convert a stub column into a DescribeColumn. +stubColumnToDescribe :: DescribeStubColumn -> DescribeColumn +stubColumnToDescribe DescribeStubColumn { .. } = + DescribeColumn + { dcName = CS.cs stubColumnName -- convert Text to ByteString + , dcType = oidFromWord stubColumnType -- convert type OID + , dcTable = oidFromWord stubColumnTable -- convert table OID + , dcAttnum = stubColumnAttnum -- keep attribute number + } + +-- Convert a stub table into TableMeta. +stubTableToDescribe :: DescribeStubTable -> (PQ.Oid, TableMeta) +stubTableToDescribe DescribeStubTable { .. 
} = + ( tableOid + , TableMeta + { tmOid = tableOid + , tmName = stubTableName + , tmColumns = Map.fromList (map (\col -> (stubTableColumnAttnum col, stubColumnMeta col)) stubTableColumns) + , tmColumnOrder = map stubTableColumnAttnum stubTableColumns + , tmPrimaryKeys = Set.fromList stubTablePrimaryKeys + , tmForeignKeys = Map.fromList (map (\fk -> (stubForeignKeyAttnum fk, oidFromWord (stubForeignKeyReferences fk))) stubTableForeignKeys) + } + ) + where + tableOid = oidFromWord stubTableOid -- convert table OID + stubColumnMeta DescribeStubTableColumn { .. } = ColumnMeta + { cmAttnum = stubTableColumnAttnum + , cmName = stubTableColumnName + , cmTypeOid = oidFromWord stubTableColumnType + , cmNotNull = stubTableColumnNotNull + } + +-- Convert a stub type into PgTypeInfo. +stubTypeToDescribe :: DescribeStubType -> (PQ.Oid, PgTypeInfo) +stubTypeToDescribe DescribeStubType { .. } = + ( oid + , PgTypeInfo + { ptiOid = oid + , ptiName = stubTypeName + , ptiElem = stubTypeElemOid >>= nonZeroOid + , ptiType = stubTypeTyptype >>= extractTyptype + , ptiNamespace = stubTypeNamespace + } + ) + where + oid = oidFromWord stubTypeOid -- convert type OID + extractTyptype text = case CS.cs text :: String of + (c:_) -> Just c -- take the first character + _ -> Nothing -- empty or invalid typtype + nonZeroOid w = if w == 0 then Nothing else Just (oidFromWord w) -- ignore 0 sentinel + +-- Convert a Word32 from JSON into a libpq Oid. +oidFromWord :: Word32 -> PQ.Oid +oidFromWord = PQ.Oid . fromIntegral -- convert numeric width + +-- | Build the Haskell type for a parameter, based on its OID. +-- High-level: map a PG type OID into a TH Type. +hsTypeForParam :: Map.Map PQ.Oid PgTypeInfo -> PQ.Oid -> TH.TypeQ +hsTypeForParam typeInfo oid = maybe (fail (CS.cs unknown)) (hsTypeForPg typeInfo False) (Map.lookup oid typeInfo) + where + unknown = "typedSql: missing type information for parameter oid " <> show oid -- error message + +-- | Build the result type for the described columns. 
+-- High-level: pick a model type for table.* or a tuple type for ad-hoc select lists. +hsTypeForColumns :: Map.Map PQ.Oid PgTypeInfo -> Map.Map PQ.Oid TableMeta -> [DescribeColumn] -> TH.TypeQ +hsTypeForColumns typeInfo tables cols = do + case detectFullTable tables cols of + Just tableName -> + pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." <> tableNameToModelName tableName)))) -- use model type + Nothing -> do + hsCols <- mapM (hsTypeForColumn typeInfo tables) cols -- map each column to a type + case hsCols of + [single] -> pure single -- single column: no tuple + _ -> pure $ foldl TH.AppT (TH.TupleT (length hsCols)) hsCols -- multiple columns: tuple + +-- | Detect whether the columns represent a full table selection (table.* with all columns in order). +-- High-level: if yes, we can return the model type directly. +detectFullTable :: Map.Map PQ.Oid TableMeta -> [DescribeColumn] -> Maybe Text +detectFullTable tables cols = do + guard (not (null cols)) -- need at least one column + let grouped = + cols + |> List.groupBy (\a b -> dcTable a == dcTable b) -- group by table OID + |> mapMaybe (\group -> case List.uncons group of + Just (first, _) -> Just (dcTable first, group) + Nothing -> Nothing + ) -- pair table OID with group + case grouped of + [(tableOid, colGroup)] | tableOid /= PQ.Oid 0 -> do + TableMeta { tmColumnOrder } <- Map.lookup tableOid tables -- look up column order + let attnums = mapMaybe dcAttnum colGroup -- extract attnums, ignoring unknown + guard (attnums == tmColumnOrder) -- must match exact order + TableMeta { tmName } <- Map.lookup tableOid tables -- fetch table name + pure tmName -- signal full-table selection + _ -> Nothing -- not a full table selection + +-- Map a single column into a Haskell type, with key-aware rules. 
+hsTypeForColumn :: Map.Map PQ.Oid PgTypeInfo -> Map.Map PQ.Oid TableMeta -> DescribeColumn -> TH.TypeQ +hsTypeForColumn typeInfo tables DescribeColumn { dcType, dcTable, dcAttnum } = + case (Map.lookup dcTable tables, dcAttnum) of + (Just TableMeta { tmName = tableName, tmPrimaryKeys, tmForeignKeys, tmColumns }, Just attnum) -> do + let baseType = Map.lookup attnum tmColumns >>= \ColumnMeta { cmTypeOid } -> Map.lookup cmTypeOid typeInfo -- base PG type + let nullable = maybe True (not . cmNotNull) (Map.lookup attnum tmColumns) -- default to nullable if unknown + case () of + _ | attnum `Set.member` tmPrimaryKeys -> + pure (wrapNull nullable (idType tableName)) -- primary key becomes Id + | Just refTable <- Map.lookup attnum tmForeignKeys -> + case Map.lookup refTable tables of + Just TableMeta { tmName = refName } -> + pure (wrapNull nullable (idType refName)) -- foreign key becomes Id of referenced table + Nothing -> + maybe (fail (CS.cs missingType)) (hsTypeForPg typeInfo nullable) baseType -- fallback + | otherwise -> + maybe (fail (CS.cs missingType)) (hsTypeForPg typeInfo nullable) baseType -- normal column mapping + where + missingType = "typedSql: missing type info for column " <> show attnum <> " of table " <> tableName + _ -> + maybe (fail (CS.cs ("typedSql: missing type info for column oid " <> show dcType))) (hsTypeForPg typeInfo True) (Map.lookup dcType typeInfo) -- unknown table info + +-- Wrap a type in Maybe when nullable. +wrapNull :: Bool -> TH.Type -> TH.Type +wrapNull nullable ty = if nullable then TH.AppT (TH.ConT ''Maybe) ty else ty -- add Maybe for nullable columns + +-- Build the Id' type for a table name. +idType :: Text -> TH.Type +idType tableName = TH.AppT (TH.ConT ''Id') (TH.LitT (TH.StrTyLit (CS.cs tableName))) -- Id' "table" + +-- Map Postgres type metadata to a Haskell type. 
+hsTypeForPg :: Map.Map PQ.Oid PgTypeInfo -> Bool -> PgTypeInfo -> TH.TypeQ +hsTypeForPg typeInfo nullable PgTypeInfo { ptiName, ptiElem, ptiType } = do + base <- case () of + _ | Just elemOid <- ptiElem -> do + elemInfo <- maybe (fail (CS.cs ("typedSql: missing array element type for " <> ptiName))) pure (Map.lookup elemOid typeInfo) -- lookup element + elemTy <- hsTypeForPg typeInfo False elemInfo -- map element type + pure (TH.AppT TH.ListT elemTy) -- array maps to list + _ | ptiName `elem` ["int2", "int4"] -> pure (TH.ConT ''Int) -- small/regular int + _ | ptiName == "int8" -> pure (TH.ConT ''Integer) -- 64-bit int + _ | ptiName `elem` ["text", "varchar", "bpchar", "citext"] -> pure (TH.ConT ''Text) -- text types + _ | ptiName == "bool" -> pure (TH.ConT ''Bool) -- boolean + _ | ptiName == "uuid" -> pure (TH.ConT ''UUID) -- UUID + _ | ptiName == "timestamptz" -> pure (TH.ConT ''UTCTime) -- timestamp with time zone + _ | ptiName == "timestamp" -> pure (TH.ConT ''LocalTime) -- timestamp without time zone + _ | ptiName == "date" -> pure (TH.ConT ''Day) -- date + _ | ptiName == "time" -> pure (TH.ConT ''TimeOfDay) -- time + _ | ptiName `elem` ["json", "jsonb"] -> pure (TH.ConT ''Aeson.Value) -- JSON value + _ | ptiName == "bytea" -> pure (TH.AppT (TH.ConT ''PG.Binary) (TH.ConT ''BS.ByteString)) -- byte array + _ | ptiName == "float4" -> pure (TH.ConT ''Float) -- 32-bit float + _ | ptiName == "float8" -> pure (TH.ConT ''Double) -- 64-bit float + _ | ptiName == "numeric" -> pure (TH.ConT ''Scientific) -- arbitrary precision numeric + _ | ptiName == "point" -> pure (TH.ConT ''PGPoint.Point) -- point type + _ | ptiName == "polygon" -> pure (TH.ConT ''PGPolygon.Polygon) -- polygon type + _ | ptiName == "inet" -> pure (TH.ConT ''IP) -- inet type + _ | ptiName == "tsvector" -> pure (TH.ConT ''PGTs.TSVector) -- full-text search vector + _ | ptiName == "interval" -> pure (TH.ConT ''PGTime.PGInterval) -- interval type + _ | ptiType == Just 'e' -> + pure (TH.ConT (TH.mkName 
(CS.cs ("Generated.Enums." <> tableNameToModelName ptiName)))) -- enum type + _ | ptiType == Just 'c' -> + pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." <> tableNameToModelName ptiName)))) -- composite type + _ -> pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." <> tableNameToModelName ptiName)))) -- fallback to generated type + pure (wrapNull nullable base) -- apply nullability wrapper diff --git a/ihp/Test/Test/Main.hs b/ihp/Test/Test/Main.hs index 343b9e096..14399bfe0 100644 --- a/ihp/Test/Test/Main.hs +++ b/ihp/Test/Test/Main.hs @@ -1,24 +1,25 @@ module Main where -import Test.Hspec -import IHP.Prelude +import IHP.Prelude +import Test.Hspec -import qualified Test.ValidationSupport.ValidateFieldSpec -import qualified Test.NameSupportSpec -import qualified Test.HaskellSupportSpec -import qualified Test.View.CSSFrameworkSpec -import qualified Test.View.FormSpec +import qualified Test.Controller.AccessDeniedSpec import qualified Test.Controller.ContextSpec -import qualified Test.Controller.ParamSpec import qualified Test.Controller.CookieSpec -import qualified Test.Controller.AccessDeniedSpec import qualified Test.Controller.NotFoundSpec +import qualified Test.Controller.ParamSpec +import qualified Test.FileStorage.ControllerFunctionsSpec +import qualified Test.HaskellSupportSpec import qualified Test.ModelSupportSpec +import qualified Test.NameSupportSpec +import qualified Test.PGListenerSpec import qualified Test.QueryBuilderSpec import qualified Test.RouterSupportSpec +import qualified Test.TypedSqlSpec +import qualified Test.ValidationSupport.ValidateFieldSpec +import qualified Test.View.CSSFrameworkSpec +import qualified Test.View.FormSpec import qualified Test.ViewSupportSpec -import qualified Test.FileStorage.ControllerFunctionsSpec -import qualified Test.PGListenerSpec main :: IO () main = hspec do @@ -38,3 +39,4 @@ main = hspec do Test.FileStorage.ControllerFunctionsSpec.tests Test.Controller.CookieSpec.tests Test.PGListenerSpec.tests 
+ Test.TypedSqlSpec.tests diff --git a/ihp/Test/Test/TypedSqlSpec.hs b/ihp/Test/Test/TypedSqlSpec.hs new file mode 100644 index 000000000..2ebde72d2 --- /dev/null +++ b/ihp/Test/Test/TypedSqlSpec.hs @@ -0,0 +1,48 @@ +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TemplateHaskell #-} +{-| +Module: Test.TypedSqlSpec +-} +module Test.TypedSqlSpec where + +import IHP.Prelude +import Test.Hspec +import IHP.TypedSql +import IHP.ModelSupport +import qualified Language.Haskell.TH.Syntax as TH +import System.Environment (setEnv) +import Database.PostgreSQL.Simple.Types (Query (..)) +import qualified Data.ByteString.Char8 as ByteString + +$(do + TH.runIO (setEnv "IHP_TYPED_SQL_STUB" "Test/Test/TypedSqlStub.json") + pure [] + ) + +-- Define primary keys for the tables referenced in the stub metadata + +type instance PrimaryKey "users" = UUID +type instance PrimaryKey "posts" = UUID + +tests :: Spec +tests = describe "TypedSql" do + it "infers Id and Text columns" do + let userId :: UUID + userId = "11111111-1111-1111-1111-111111111111" + typed :: TypedQuery (Id' "users", Text) + typed = [typedSql|SELECT users.id, users.name FROM users WHERE users.id = ${userId}|] + Query sqlBytes = tqQuery typed + sqlBytes `shouldBe` ByteString.pack "SELECT users.id, users.name FROM users WHERE users.id = $1" + + it "maps nullable foreign keys to Maybe Id'" do + let slug :: Text + slug = "hello-world" + typed :: TypedQuery (Maybe (Id' "users")) + typed = [typedSql|SELECT posts.author_id FROM posts WHERE posts.slug = ${slug}|] + length (tqParams typed) `shouldBe` 1 + + it "infers aggregate columns" do + let typed :: TypedQuery (Maybe Integer) + typed = [typedSql|SELECT COUNT(*) FROM posts|] + Query sqlBytes = tqQuery typed + sqlBytes `shouldBe` ByteString.pack "SELECT COUNT(*) FROM posts" diff --git a/ihp/Test/Test/TypedSqlStub.json b/ihp/Test/Test/TypedSqlStub.json new file mode 100644 index 000000000..f85625a59 --- /dev/null +++ b/ihp/Test/Test/TypedSqlStub.json @@ -0,0 +1,77 @@ +{ + "queries": 
[ + { + "sql": "SELECT users.id, users.name FROM users WHERE users.id = $1", + "params": [2950], + "columns": [ + {"name": "id", "typeOid": 2950, "tableOid": 100000, "attnum": 1}, + {"name": "name", "typeOid": 25, "tableOid": 100000, "attnum": 2} + ], + "tables": [ + { + "oid": 100000, + "name": "users", + "columns": [ + {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, + {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true}, + {"attnum": 3, "name": "email", "typeOid": 25, "notNull": true} + ], + "primaryKeys": [1], + "foreignKeys": [] + } + ], + "types": [ + {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"}, + {"oid": 25, "name": "text", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} + ] + }, + { + "sql": "SELECT posts.author_id FROM posts WHERE posts.slug = $1", + "params": [25], + "columns": [ + {"name": "author_id", "typeOid": 2950, "tableOid": 100001, "attnum": 3} + ], + "tables": [ + { + "oid": 100001, + "name": "posts", + "columns": [ + {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, + {"attnum": 2, "name": "title", "typeOid": 25, "notNull": true}, + {"attnum": 3, "name": "author_id", "typeOid": 2950, "notNull": false} + ], + "primaryKeys": [1], + "foreignKeys": [ + {"attnum": 3, "references": 100000} + ] + }, + { + "oid": 100000, + "name": "users", + "columns": [ + {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, + {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true}, + {"attnum": 3, "name": "email", "typeOid": 25, "notNull": true} + ], + "primaryKeys": [1], + "foreignKeys": [] + } + ], + "types": [ + {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"}, + {"oid": 25, "name": "text", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} + ] + }, + { + "sql": "SELECT COUNT(*) FROM posts", + "params": [], + "columns": [ + {"name": "count", "typeOid": 20, "tableOid": 0, "attnum": null} + ], + "tables": [], + 
"types": [ + {"oid": 20, "name": "int8", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} + ] + } + ] +} diff --git a/ihp/ihp.cabal b/ihp/ihp.cabal index f43fea857..4114cf937 100644 --- a/ihp/ihp.cabal +++ b/ihp/ihp.cabal @@ -60,6 +60,7 @@ common shared-properties , inflections , text , postgresql-simple + , postgresql-libpq , wai-app-static , wai-util , bytestring @@ -198,6 +199,7 @@ library , IHP.ModelSupport , IHP.NameSupport , IHP.QueryBuilder + , IHP.TypedSql , IHP.Fetch , IHP.RouterPrelude , IHP.Server @@ -270,3 +272,5 @@ test-suite tests Test.FileStorage.ControllerFunctionsSpec Test.Controller.CookieSpec Test.PGListenerSpec + Test.AutoRefreshSpec + Test.TypedSqlSpec From 692df7378337c94fc7356585a8a43509b4f94a4a Mon Sep 17 00:00:00 2001 From: vcombey Date: Wed, 31 Dec 2025 15:53:44 +0100 Subject: [PATCH 02/13] typed sql poc --- ihp/IHP/TypedSql.hs | 163 ++++++++++++++++++---------------- ihp/Test/Test/TypedSqlSpec.hs | 18 ++-- 2 files changed, 93 insertions(+), 88 deletions(-) diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index 7c164624c..12d678026 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -1,8 +1,8 @@ -{-# LANGUAGE ImplicitParams #-} -- allow ?modelContext implicit parameter -{-# LANGUAGE NamedFieldPuns #-} -- allow record field puns like {field} -{-# LANGUAGE QuasiQuotes #-} -- enable [typedSql|...|] quasiquoter syntax -{-# LANGUAGE RecordWildCards #-} -- allow {..} to bind all record fields -{-# LANGUAGE TemplateHaskell #-} -- allow compile-time code generation via TH +{-# LANGUAGE ImplicitParams #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} -- Module implementing the typed SQL quasiquoter and its supporting helpers. 
module IHP.TypedSql @@ -12,48 +12,53 @@ module IHP.TypedSql , runTypedOne -- execute a typed query expecting exactly one row ) where -import IHP.Prelude -- IHP's standard prelude with common utilities -import Control.Monad (guard) -- guard for Maybe-based checks -import qualified Data.Aeson as Aeson -- JSON parsing for stub metadata -import Data.Aeson ((.:), (.:?), (.!=)) -- JSON field operators used in FromJSON instances -import qualified Data.ByteString as BS -- byte string for raw SQL and file IO -import qualified Data.List as List -- list helpers like groupBy -import Data.Maybe (mapMaybe) -- mapMaybe for grouping columns safely -import qualified Data.Map.Strict as Map -- strict map used for metadata lookup -import qualified Data.Set as Set -- set used for OID collection -import qualified Data.String.Conversions as CS -- convert between Text/String/ByteString -import Data.IORef (IORef, atomicModifyIORef', newIORef, readIORef) -- mutable cache for stubs -import Data.Scientific (Scientific) -- numeric type for Postgres numeric -import Data.Time (LocalTime, TimeOfDay, UTCTime) -- time types for mapping -import Data.Time.Calendar (Day) -- date-only type -import Data.UUID (UUID) -- UUID mapping -import Data.Word (Word32) -- raw OID storage in stub files -import qualified Database.PostgreSQL.LibPQ as PQ -- low-level libpq for describe/prepare -import qualified Database.PostgreSQL.Simple as PG -- high-level postgres-simple API -import qualified Database.PostgreSQL.Simple.FromRow as PGFR -- row parser typeclass -import qualified Database.PostgreSQL.Simple.ToField as PGTF -- parameter encoding -import qualified Database.PostgreSQL.Simple.ToRow as PGTR -- ToRow instance for PreparedRow -import qualified Database.PostgreSQL.Simple.Types as PG -- extra postgres-simple types (Binary, Oid, etc.) 
-import qualified Language.Haskell.Meta.Parse as HaskellMeta -- parse placeholder expressions as Haskell AST -import qualified Language.Haskell.TH as TH -- Template Haskell core types -import qualified Language.Haskell.TH.Quote as TH -- QuasiQuoter type -import Net.IP (IP) -- inet type mapping - -import IHP.FrameworkConfig (defaultDatabaseUrl) -- resolve DB URL for describe -import IHP.ModelSupport (Id', ModelContext, measureTimeIfLogging, withDatabaseConnection, withRLSParams) -- query helpers and Id type -import IHP.NameSupport (tableNameToModelName) -- convert SQL table names to Haskell type names -import qualified IHP.Postgres.Point as PGPoint -- Postgres point mapping -import qualified IHP.Postgres.Polygon as PGPolygon -- Postgres polygon mapping -import qualified IHP.Postgres.TSVector as PGTs -- tsvector mapping -import qualified IHP.Postgres.TimeParser as PGTime -- interval mapping -import System.Environment (lookupEnv) -- read stub path env var -import System.IO.Unsafe (unsafePerformIO) -- top-level cache with IORef +import Control.Monad (guard) +import Data.Aeson ((.!=), (.:), (.:?)) +import qualified Data.Aeson as Aeson +import qualified Data.ByteString as BS +import Data.IORef (IORef, atomicModifyIORef', + newIORef, readIORef) +import qualified Data.List as List +import qualified Data.Map.Strict as Map +import Data.Maybe (mapMaybe) +import Data.Scientific (Scientific) +import qualified Data.Set as Set +import qualified Data.String.Conversions as CS +import Data.Time (LocalTime, TimeOfDay, + UTCTime) +import Data.Time.Calendar (Day) +import Data.UUID (UUID) +import Data.Word (Word32) +import qualified Database.PostgreSQL.LibPQ as PQ +import qualified Database.PostgreSQL.Simple as PG +import qualified Database.PostgreSQL.Simple.FromRow as PGFR +import qualified Database.PostgreSQL.Simple.ToField as PGTF +import qualified Database.PostgreSQL.Simple.ToRow as PGTR +import qualified Database.PostgreSQL.Simple.Types as PG +import IHP.Prelude +import qualified 
Language.Haskell.Meta.Parse as HaskellMeta +import qualified Language.Haskell.TH as TH +import qualified Language.Haskell.TH.Quote as TH +import Net.IP (IP) + +import IHP.FrameworkConfig (defaultDatabaseUrl) +import IHP.ModelSupport (Id', ModelContext, + measureTimeIfLogging, + withDatabaseConnection, + withRLSParams) +import IHP.NameSupport (tableNameToModelName) +import qualified IHP.Postgres.Point as PGPoint +import qualified IHP.Postgres.Polygon as PGPolygon +import qualified IHP.Postgres.TimeParser as PGTime +import qualified IHP.Postgres.TSVector as PGTs +import System.Environment (lookupEnv) +import System.IO.Unsafe (unsafePerformIO) -- | Prepared query with a custom row parser. -- High-level: this is the runtime value produced by the typed SQL quasiquoter. data TypedQuery result = TypedQuery - { tqQuery :: !PG.Query -- low-level: SQL text packaged for postgres-simple - , tqParams :: ![PGTF.Action] -- low-level: encoded parameter values for the query + { tqQuery :: !PG.Query -- low-level: SQL text packaged for postgres-simple + , tqParams :: ![PGTF.Action] -- low-level: encoded parameter values for the query , tqRowParser :: !(PGFR.RowParser result) -- low-level: how to decode each row into result } @@ -103,33 +108,33 @@ typedSql = -- | Result of describing a statement. -- High-level: captures everything we need to infer Haskell types. data DescribeResult = DescribeResult - { drParams :: ![PQ.Oid] -- low-level: parameter OIDs in order + { drParams :: ![PQ.Oid] -- low-level: parameter OIDs in order , drColumns :: ![DescribeColumn] -- low-level: column metadata in order - , drTables :: !(Map.Map PQ.Oid TableMeta) -- low-level: table metadata by OID - , drTypes :: !(Map.Map PQ.Oid PgTypeInfo) -- low-level: type metadata by OID + , drTables :: !(Map.Map PQ.Oid TableMeta) -- low-level: table metadata by OID + , drTypes :: !(Map.Map PQ.Oid PgTypeInfo) -- low-level: type metadata by OID } -- Metadata for a column in the result set. 
data DescribeColumn = DescribeColumn - { dcName :: !BS.ByteString -- low-level: column name as bytes - , dcType :: !PQ.Oid -- low-level: column type OID - , dcTable :: !PQ.Oid -- low-level: originating table OID (0 if none) + { dcName :: !BS.ByteString -- low-level: column name as bytes + , dcType :: !PQ.Oid -- low-level: column type OID + , dcTable :: !PQ.Oid -- low-level: originating table OID (0 if none) , dcAttnum :: !(Maybe Int) -- low-level: attribute number inside table, if known } -- Column details extracted from pg_attribute. data ColumnMeta = ColumnMeta - { cmAttnum :: !Int -- low-level: attribute number within table - , cmName :: !Text -- low-level: column name + { cmAttnum :: !Int -- low-level: attribute number within table + , cmName :: !Text -- low-level: column name , cmTypeOid :: !PQ.Oid -- low-level: type OID for this column , cmNotNull :: !Bool -- low-level: whether the column is NOT NULL } -- Table metadata, including columns and key relationships. data TableMeta = TableMeta - { tmOid :: !PQ.Oid -- low-level: table OID - , tmName :: !Text -- low-level: table name - , tmColumns :: !(Map.Map Int ColumnMeta) -- low-level: columns keyed by attnum + { tmOid :: !PQ.Oid -- low-level: table OID + , tmName :: !Text -- low-level: table name + , tmColumns :: !(Map.Map Int ColumnMeta) -- low-level: columns keyed by attnum , tmColumnOrder :: ![Int] -- low-level: column order as defined in table , tmPrimaryKeys :: !(Set.Set Int) -- low-level: attribute numbers of primary keys , tmForeignKeys :: !(Map.Map Int PQ.Oid) -- low-level: attnum -> referenced table oid @@ -137,10 +142,10 @@ data TableMeta = TableMeta -- Postgres type metadata we need for Haskell mapping. data PgTypeInfo = PgTypeInfo - { ptiOid :: !PQ.Oid -- low-level: type OID - , ptiName :: !Text -- low-level: type name (typname) - , ptiElem :: !(Maybe PQ.Oid) -- low-level: element type for arrays - , ptiType :: !(Maybe Char) -- low-level: typtype code (e.g. 
'e' for enum) + { ptiOid :: !PQ.Oid -- low-level: type OID + , ptiName :: !Text -- low-level: type name (typname) + , ptiElem :: !(Maybe PQ.Oid) -- low-level: element type for arrays + , ptiType :: !(Maybe Char) -- low-level: typtype code (e.g. 'e' for enum) , ptiNamespace :: !(Maybe Text) -- low-level: namespace name } @@ -162,7 +167,7 @@ typedSqlExp rawSql = do stubPath <- TH.runIO (lookupEnv "IHP_TYPED_SQL_STUB") -- optional path to a stub file describeResult <- TH.runIO $ case stubPath of Just path -> describeUsingStub path sqlText -- offline: use stub metadata - Nothing -> describeStatement (CS.cs sqlText) -- online: ask Postgres to describe + Nothing -> describeStatement (CS.cs sqlText) -- online: ask Postgres to describe let DescribeResult { drParams, drColumns, drTables, drTypes } = describeResult -- unpack metadata when (length drParams /= length parsedExprs) $ -- make sure counts match @@ -433,11 +438,11 @@ instance Aeson.FromJSON DescribeStubFile where -- One stubbed SQL statement entry. data DescribeStubEntry = DescribeStubEntry - { stubEntrySql :: !Text -- SQL text used as lookup key - , stubEntryParams :: ![Word32] -- parameter type OIDs + { stubEntrySql :: !Text -- SQL text used as lookup key + , stubEntryParams :: ![Word32] -- parameter type OIDs , stubEntryColumns :: ![DescribeStubColumn] -- result columns - , stubEntryTables :: ![DescribeStubTable] -- table metadata - , stubEntryTypes :: ![DescribeStubType] -- type metadata + , stubEntryTables :: ![DescribeStubTable] -- table metadata + , stubEntryTypes :: ![DescribeStubType] -- type metadata } instance Aeson.FromJSON DescribeStubEntry where @@ -451,9 +456,9 @@ instance Aeson.FromJSON DescribeStubEntry where -- Column description inside a stub entry. 
data DescribeStubColumn = DescribeStubColumn - { stubColumnName :: !Text -- column name - , stubColumnType :: !Word32 -- type OID - , stubColumnTable :: !Word32 -- table OID (0 if none) + { stubColumnName :: !Text -- column name + , stubColumnType :: !Word32 -- type OID + , stubColumnTable :: !Word32 -- table OID (0 if none) , stubColumnAttnum :: !(Maybe Int) -- attribute number, if known } @@ -467,9 +472,9 @@ instance Aeson.FromJSON DescribeStubColumn where -- Table description inside a stub entry. data DescribeStubTable = DescribeStubTable - { stubTableOid :: !Word32 -- table OID - , stubTableName :: !Text -- table name - , stubTableColumns :: ![DescribeStubTableColumn] -- table columns + { stubTableOid :: !Word32 -- table OID + , stubTableName :: !Text -- table name + , stubTableColumns :: ![DescribeStubTableColumn] -- table columns , stubTablePrimaryKeys :: ![Int] -- primary key attnums , stubTableForeignKeys :: ![DescribeStubForeignKey] -- foreign keys } @@ -485,9 +490,9 @@ instance Aeson.FromJSON DescribeStubTable where -- Column description inside a stubbed table. data DescribeStubTableColumn = DescribeStubTableColumn - { stubTableColumnAttnum :: !Int -- attribute number in table - , stubTableColumnName :: !Text -- column name - , stubTableColumnType :: !Word32 -- type OID + { stubTableColumnAttnum :: !Int -- attribute number in table + , stubTableColumnName :: !Text -- column name + , stubTableColumnType :: !Word32 -- type OID , stubTableColumnNotNull :: !Bool -- NOT NULL flag } @@ -501,7 +506,7 @@ instance Aeson.FromJSON DescribeStubTableColumn where -- Foreign key description inside a stubbed table. data DescribeStubForeignKey = DescribeStubForeignKey - { stubForeignKeyAttnum :: !Int -- local column attnum + { stubForeignKeyAttnum :: !Int -- local column attnum , stubForeignKeyReferences :: !Word32 -- referenced table OID } @@ -513,10 +518,10 @@ instance Aeson.FromJSON DescribeStubForeignKey where -- Type description inside a stub entry. 
data DescribeStubType = DescribeStubType - { stubTypeOid :: !Word32 -- type OID - , stubTypeName :: !Text -- type name - , stubTypeElemOid :: !(Maybe Word32) -- element type OID for arrays - , stubTypeTyptype :: !(Maybe Text) -- typtype as text + { stubTypeOid :: !Word32 -- type OID + , stubTypeName :: !Text -- type name + , stubTypeElemOid :: !(Maybe Word32) -- element type OID for arrays + , stubTypeTyptype :: !(Maybe Text) -- typtype as text , stubTypeNamespace :: !(Maybe Text) -- namespace name } @@ -587,7 +592,7 @@ stubTypeToDescribe DescribeStubType { .. } = oid = oidFromWord stubTypeOid -- convert type OID extractTyptype text = case CS.cs text :: String of (c:_) -> Just c -- take the first character - _ -> Nothing -- empty or invalid typtype + _ -> Nothing -- empty or invalid typtype nonZeroOid w = if w == 0 then Nothing else Just (oidFromWord w) -- ignore 0 sentinel -- Convert a Word32 from JSON into a libpq Oid. @@ -624,7 +629,7 @@ detectFullTable tables cols = do |> List.groupBy (\a b -> dcTable a == dcTable b) -- group by table OID |> mapMaybe (\group -> case List.uncons group of Just (first, _) -> Just (dcTable first, group) - Nothing -> Nothing + Nothing -> Nothing ) -- pair table OID with group case grouped of [(tableOid, colGroup)] | tableOid /= PQ.Oid 0 -> do diff --git a/ihp/Test/Test/TypedSqlSpec.hs b/ihp/Test/Test/TypedSqlSpec.hs index 2ebde72d2..1e50820e3 100644 --- a/ihp/Test/Test/TypedSqlSpec.hs +++ b/ihp/Test/Test/TypedSqlSpec.hs @@ -1,18 +1,18 @@ -{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-| Module: Test.TypedSqlSpec -} module Test.TypedSqlSpec where -import IHP.Prelude -import Test.Hspec -import IHP.TypedSql -import IHP.ModelSupport -import qualified Language.Haskell.TH.Syntax as TH -import System.Environment (setEnv) -import Database.PostgreSQL.Simple.Types (Query (..)) -import qualified Data.ByteString.Char8 as ByteString +import qualified Data.ByteString.Char8 as ByteString +import 
Database.PostgreSQL.Simple.Types (Query (..)) +import IHP.ModelSupport +import IHP.Prelude +import IHP.TypedSql +import qualified Language.Haskell.TH.Syntax as TH +import System.Environment (setEnv) +import Test.Hspec $(do TH.runIO (setEnv "IHP_TYPED_SQL_STUB" "Test/Test/TypedSqlStub.json") From f1516a0b4d7ade1f056bd0f8fe36219b3573a909 Mon Sep 17 00:00:00 2001 From: vcombey Date: Thu, 1 Jan 2026 13:50:17 +0100 Subject: [PATCH 03/13] include json stub for tests --- NixSupport/overlay.nix | 4 ++-- ihp/Test/Test/TypedSqlSpec.hs | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/NixSupport/overlay.nix b/NixSupport/overlay.nix index 270a2b596..3e66af016 100644 --- a/NixSupport/overlay.nix +++ b/NixSupport/overlay.nix @@ -7,7 +7,7 @@ final: prev: { overrides = self: super: let filter = inputs.nix-filter.lib; - localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") filter.isDirectory "LICENSE" "data" ]; }) {}; + localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") (filter.matchExt "json") filter.isDirectory "LICENSE" "data" ]; }) {}; in { ihp = localPackage "ihp"; ihp-ide = localPackage "ihp-ide"; @@ -42,4 +42,4 @@ final: prev: { }; }; }; -} \ No newline at end of file +} diff --git a/ihp/Test/Test/TypedSqlSpec.hs b/ihp/Test/Test/TypedSqlSpec.hs index 1e50820e3..c55899ea0 100644 --- a/ihp/Test/Test/TypedSqlSpec.hs +++ b/ihp/Test/Test/TypedSqlSpec.hs @@ -12,10 +12,13 @@ import IHP.Prelude import IHP.TypedSql import qualified Language.Haskell.TH.Syntax as TH import System.Environment (setEnv) +import System.FilePath (takeDirectory, ()) import Test.Hspec $(do - TH.runIO (setEnv "IHP_TYPED_SQL_STUB" "Test/Test/TypedSqlStub.json") + loc <- TH.location + let stubPath = takeDirectory (TH.loc_filename loc) 
"TypedSqlStub.json" + TH.runIO (setEnv "IHP_TYPED_SQL_STUB" stubPath) pure [] ) From 4272b8b632257534710876c2cfbc207c6f06ddb2 Mon Sep 17 00:00:00 2001 From: vcombey Date: Thu, 1 Jan 2026 17:34:50 +0100 Subject: [PATCH 04/13] use sql instead of stub file --- Guide/typed-sql.markdown | 146 ++++++----- ihp/IHP/TypedSql.hs | 419 ++++++++++++++----------------- ihp/Test/Test/TypedSqlSchema.sql | 11 + ihp/Test/Test/TypedSqlSpec.hs | 7 +- ihp/Test/Test/TypedSqlStub.json | 77 ------ ihp/ihp.cabal | 5 +- 6 files changed, 296 insertions(+), 369 deletions(-) create mode 100644 ihp/Test/Test/TypedSqlSchema.sql delete mode 100644 ihp/Test/Test/TypedSqlStub.json diff --git a/Guide/typed-sql.markdown b/Guide/typed-sql.markdown index da04f0aa9..d1c47c894 100644 --- a/Guide/typed-sql.markdown +++ b/Guide/typed-sql.markdown @@ -171,78 +171,92 @@ If you have custom types, add a `FromField` instance and extend There is no separate runtime connection layer. +## How typedSql works internally + +`typedSql` is implemented as a Template Haskell quasiquoter. The pipeline +looks like this: + +1. **Placeholder rewrite**: the SQL template is scanned for `${expr}` + placeholders. Each placeholder is replaced by `$1`, `$2`, ... and the + captured expressions are parsed as Haskell AST. +2. **Statement describe**: at compile time, `typedSql` prepares the query + and runs `DESCRIBE` via libpq. This returns: + - parameter OIDs (types for each `$N`) + - result column OIDs, table OIDs, and attribute numbers +3. **Catalog metadata fetch**: `typedSql` then queries `pg_catalog` to + resolve: + - table/column order (`pg_class`, `pg_attribute`) + - nullability (`pg_attribute.attnotnull`) + - primary/foreign keys (`pg_constraint`) + - enum/composite/array metadata (`pg_type`) +4. **IHP type mapping**: + - Primary keys become `Id' "table"`. + - Single-column foreign keys become `Id' "ref_table"`. + - Enums map to `Generated.Enums.`. + - Composite types map to `Generated.ActualTypes.`. 
+ - If the select list exactly matches `table.*` order, the result type + becomes the generated record type (`Generated.ActualTypes.`). +5. **TypedQuery generation**: the quasiquoter emits a `TypedQuery` value + with: + - `PG.Query` containing the rewritten SQL + - `toField`-encoded parameters + - a row parser (`field` for single column, `fromRow` for tuples) + +At runtime, `runTyped` executes the generated query using IHP's +`ModelContext`, so it reuses the same logging, RLS parameters, and +connection pool as the rest of IHP. + ## Compile-time database access -`typedSql` talks to your database at compile time. It uses -`DATABASE_URL` or the same default used by IHP (`build/db`). Make sure -the database is running and the schema is up to date when compiling. +`typedSql` needs schema metadata at compile time. If +`IHP_TYPED_SQL_BOOTSTRAP` is set, it uses bootstrap mode. Otherwise it +connects to your live database. -If the schema changes, recompile so the query description is refreshed. +### Live database (default) -## Tests without a database: stub mode +`typedSql` connects to your database at compile time using `DATABASE_URL` +or the same default used by IHP (`build/db`). Make sure the database is +running and the schema is up to date when compiling. -For tests, you can skip the live DB by providing a JSON stub file. This -lets you run `typedSql` in CI without a running Postgres. +If the schema changes, recompile so the query description is refreshed. -Set the environment variable before splicing any `typedSql`: +### Bootstrap mode (schema-only) -```haskell -{-# LANGUAGE TemplateHaskell #-} +Bootstrap mode avoids a running database by creating a temporary local +Postgres instance from your SQL files at compile time. This keeps the +SQL/type checking fully real while remaining reproducible in CI. 
-import Language.Haskell.TH.Syntax (runIO) -import System.Environment (setEnv) +Enable it with: -$(do - runIO (setEnv "IHP_TYPED_SQL_STUB" "Test/Test/TypedSqlStub.json") - pure [] - ) +```bash +export IHP_TYPED_SQL_BOOTSTRAP=1 ``` -### Stub file format - -A stub file is a JSON document with a list of query entries. Each entry -contains: - -- the SQL string (after placeholder substitution) -- parameter OIDs -- column OIDs and table metadata -- table metadata (columns, PKs, FKs) -- type metadata (names, element OIDs, type category) - -Example: - -```json -{ - "queries": [ - { - "sql": "SELECT users.id, users.name FROM users WHERE users.id = $1", - "params": [2950], - "columns": [ - {"name": "id", "typeOid": 2950, "tableOid": 100000, "attnum": 1}, - {"name": "name", "typeOid": 25, "tableOid": 100000, "attnum": 2} - ], - "tables": [ - { - "oid": 100000, - "name": "users", - "columns": [ - {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, - {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true} - ], - "primaryKeys": [1], - "foreignKeys": [] - } - ], - "types": [ - {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b"}, - {"oid": 25, "name": "text", "elemOid": null, "typtype": "b"} - ] - } - ] -} -``` +When enabled, `typedSql` will: + +1. Run `initdb` into a temp directory. +2. Start a local `postgres` instance bound to a unix socket. +3. Load `IHPSchema.sql` (if found), then `Application/Schema.sql`. +4. Run the same describe + catalog queries against the temporary DB. + +Schema discovery rules: -Only the queries your tests use need to be present in the stub file. +- `IHP_TYPED_SQL_SCHEMA` overrides the app schema path. +- Otherwise, `Application/Schema.sql` is discovered by walking upward from + the module containing the `[typedSql| ... |]`. +- `IHP_TYPED_SQL_IHP_SCHEMA` overrides the IHP schema path. +- Otherwise, if `IHP_LIB` is set, `IHP_LIB/IHPSchema.sql` is used. 
+- Otherwise, it tries to locate `ihp-ide/data/IHPSchema.sql` when building + from the IHP repo. + +Tools required on `PATH`: + +- `initdb` +- `postgres` +- `createdb` +- `psql` + +If any tool is missing, `typedSql` will fail with a clear error. ## Limitations and gotchas @@ -268,7 +282,17 @@ You get compile-time SQL validation with minimal changes. **Error: could not connect to database** - Ensure `DATABASE_URL` is set and reachable during compilation. -- Or set `IHP_TYPED_SQL_STUB` to use stub mode. +- Or set `IHP_TYPED_SQL_BOOTSTRAP=1` to use bootstrap mode. + +**Error: bootstrap requires 'initdb' in PATH** + +- Install the PostgreSQL client/server tools. +- Make sure `initdb`, `postgres`, `createdb`, and `psql` are on `PATH`. + +**Error: could not find Application/Schema.sql** + +- Set `IHP_TYPED_SQL_SCHEMA` to an absolute path. +- Or ensure the module using `typedSql` is inside your app directory. **Error: placeholder count mismatch** diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index 12d678026..4d59b5b07 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -12,15 +12,15 @@ module IHP.TypedSql , runTypedOne -- execute a typed query expecting exactly one row ) where +import Control.Exception (bracket_) import Control.Monad (guard) -import Data.Aeson ((.!=), (.:), (.:?)) import qualified Data.Aeson as Aeson import qualified Data.ByteString as BS -import Data.IORef (IORef, atomicModifyIORef', - newIORef, readIORef) +import qualified Data.ByteString.Char8 as BS8 +import qualified Data.Char as Char import qualified Data.List as List import qualified Data.Map.Strict as Map -import Data.Maybe (mapMaybe) +import Data.Maybe (catMaybes, mapMaybe) import Data.Scientific (Scientific) import qualified Data.Set as Set import qualified Data.String.Conversions as CS @@ -28,7 +28,6 @@ import Data.Time (LocalTime, TimeOfDay, UTCTime) import Data.Time.Calendar (Day) import Data.UUID (UUID) -import Data.Word (Word32) import qualified Database.PostgreSQL.LibPQ 
as PQ import qualified Database.PostgreSQL.Simple as PG import qualified Database.PostgreSQL.Simple.FromRow as PGFR @@ -51,8 +50,18 @@ import qualified IHP.Postgres.Point as PGPoint import qualified IHP.Postgres.Polygon as PGPolygon import qualified IHP.Postgres.TimeParser as PGTime import qualified IHP.Postgres.TSVector as PGTs +import System.Directory (canonicalizePath, + createDirectoryIfMissing, + doesDirectoryExist, + doesFileExist, + findExecutable, + removeDirectoryRecursive) import System.Environment (lookupEnv) -import System.IO.Unsafe (unsafePerformIO) +import System.FilePath (isRelative, takeDirectory, + takeFileName, ()) +import System.IO (Handle, hIsEOF) +import System.IO.Temp (withSystemTempDirectory) +import qualified System.Process as Process -- | Prepared query with a custom row parser. -- High-level: this is the runtime value produced by the typed SQL quasiquoter. @@ -164,10 +173,13 @@ typedSqlExp rawSql = do let (sqlText, placeholderExprs) = substitutePlaceholders rawSql parsedExprs <- mapM parseExpr placeholderExprs -- parse each placeholder as Haskell code - stubPath <- TH.runIO (lookupEnv "IHP_TYPED_SQL_STUB") -- optional path to a stub file - describeResult <- TH.runIO $ case stubPath of - Just path -> describeUsingStub path sqlText -- offline: use stub metadata - Nothing -> describeStatement (CS.cs sqlText) -- online: ask Postgres to describe + bootstrapEnv <- TH.runIO (lookupEnv "IHP_TYPED_SQL_BOOTSTRAP") -- optional bootstrap mode + loc <- TH.location + let useBootstrap = isBootstrapEnabled bootstrapEnv + describeResult <- TH.runIO $ + if useBootstrap + then describeUsingBootstrap (TH.loc_filename loc) sqlText -- bootstrap DB from schema + else describeStatement (CS.cs sqlText) -- online: ask Postgres to describe let DescribeResult { drParams, drColumns, drTables, drTypes } = describeResult -- unpack metadata when (length drParams /= length parsedExprs) $ -- make sure counts match @@ -217,18 +229,168 @@ parseExpr exprText = Left err -> 
fail ("typedSql: failed to parse expression {" <> exprText <> "}: " <> err) -- parse error Right expr -> pure expr -- success: return parsed TH expression --- | Describe a statement using libpq, and fetch the additional metadata needed to map to Haskell types. --- High-level: loads a DescribeResult from a stub JSON file. -describeUsingStub :: FilePath -> String -> IO DescribeResult -describeUsingStub path sqlText = do - entries <- loadStubEntries path -- load and cache stub entries - let key = CS.cs sqlText -- use the SQL text as the lookup key - maybe (fail ("typedSql: no stub entry for SQL: " <> sqlText)) pure (Map.lookup key entries) -- lookup or fail +isBootstrapEnabled :: Maybe String -> Bool +isBootstrapEnabled = \case + Nothing -> False + Just raw -> + let value = map Char.toLower raw + in not (value `elem` ["", "0", "false", "no", "off"]) + +data BootstrapConfig = BootstrapConfig + { bcAppSchemaPath :: !FilePath + , bcIhpSchemaPath :: !(Maybe FilePath) + } + +data PgTools = PgTools + { pgInitdb :: !FilePath + , pgPostgres :: !FilePath + , pgCreatedb :: !FilePath + , pgPsql :: !FilePath + } + +describeUsingBootstrap :: FilePath -> String -> IO DescribeResult +describeUsingBootstrap sourcePath sqlText = do + config <- resolveBootstrapConfig sourcePath + withBootstrapDatabase config \dbUrl -> + describeStatementWith dbUrl (CS.cs sqlText) + +resolveBootstrapConfig :: FilePath -> IO BootstrapConfig +resolveBootstrapConfig sourcePath = do + sourceDir <- canonicalizePath (takeDirectory sourcePath) + appSchemaPath <- resolveSchemaPath sourceDir + ihpSchemaPath <- resolveIhpSchemaPath sourceDir + pure BootstrapConfig + { bcAppSchemaPath = appSchemaPath + , bcIhpSchemaPath = ihpSchemaPath + } + +resolveSchemaPath :: FilePath -> IO FilePath +resolveSchemaPath sourceDir = do + envSchema <- lookupEnv "IHP_TYPED_SQL_SCHEMA" + case envSchema of + Just path -> resolveRelativePath sourceDir path >>= ensureFileExists "IHP_TYPED_SQL_SCHEMA" + Nothing -> do + findUpwards 
sourceDir ("Application" "Schema.sql") >>= \case + Just found -> pure found + Nothing -> + fail "typedSql: could not find Application/Schema.sql. Set IHP_TYPED_SQL_SCHEMA to an absolute path." + +resolveIhpSchemaPath :: FilePath -> IO (Maybe FilePath) +resolveIhpSchemaPath sourceDir = do + envSchema <- lookupEnv "IHP_TYPED_SQL_IHP_SCHEMA" + case envSchema of + Just path -> Just <$> (resolveRelativePath sourceDir path >>= ensureFileExists "IHP_TYPED_SQL_IHP_SCHEMA") + Nothing -> do + envLib <- lookupEnv "IHP_LIB" + fromLib <- case envLib of + Just libPath -> do + let candidate = libPath "IHPSchema.sql" + exists <- doesFileExist candidate + pure (if exists then Just candidate else Nothing) + Nothing -> pure Nothing + case fromLib of + Just _ -> pure fromLib + Nothing -> findUpwards sourceDir ("ihp-ide" "data" "IHPSchema.sql") + +resolveRelativePath :: FilePath -> FilePath -> IO FilePath +resolveRelativePath baseDir path = do + let resolved = if isRelative path then baseDir path else path + canonicalizePath resolved + +ensureFileExists :: String -> FilePath -> IO FilePath +ensureFileExists label path = do + exists <- doesFileExist path + if exists + then pure path + else fail ("typedSql: " <> label <> " points to missing file: " <> path) + +findUpwards :: FilePath -> FilePath -> IO (Maybe FilePath) +findUpwards startDir relativePath = go startDir + where + go current = do + let candidate = current relativePath + exists <- doesFileExist candidate + if exists + then Just <$> canonicalizePath candidate + else do + let parent = takeDirectory current + if parent == current + then pure Nothing + else go parent + +withBootstrapDatabase :: BootstrapConfig -> (BS.ByteString -> IO a) -> IO a +withBootstrapDatabase BootstrapConfig { bcAppSchemaPath, bcIhpSchemaPath } action = do + PgTools { pgInitdb, pgPostgres, pgCreatedb, pgPsql } <- resolvePgTools + withSystemTempDirectory "ihp-typed-sql" \tempDir -> do + let dataDir = tempDir "state" + let socketDir = "/tmp" takeFileName 
tempDir + let cleanupSocket = do + exists <- doesDirectoryExist socketDir + when exists (removeDirectoryRecursive socketDir) + bracket_ (createDirectoryIfMissing True socketDir) cleanupSocket do + Process.callProcess pgInitdb [dataDir, "--no-locale", "--encoding", "UTF8"] + + let params = + (Process.proc pgPostgres ["-D", dataDir, "-k", socketDir, "-c", "listen_addresses="]) + { Process.std_in = Process.CreatePipe + , Process.std_out = Process.CreatePipe + , Process.std_err = Process.CreatePipe + } + Process.withCreateProcess params \_ _ stderrHandle processHandle -> do + errHandle <- maybe (fail "typedSql: unable to read postgres logs") pure stderrHandle + let stop = do + Process.terminateProcess processHandle + _ <- Process.waitForProcess processHandle + pure () + let start = do + waitUntilReady errHandle + Process.callProcess pgCreatedb ["app", "-h", socketDir] + let loadSchema file = Process.callProcess pgPsql ["-h", socketDir, "-d", "app", "-v", "ON_ERROR_STOP=1", "-f", file] + forM_ (catMaybes [bcIhpSchemaPath, Just bcAppSchemaPath]) loadSchema + bracket_ start stop do + let dbUrl = CS.cs ("postgresql:///app?host=" <> socketDir) + action dbUrl + +resolvePgTools :: IO PgTools +resolvePgTools = do + pgPostgres <- requireExecutable "postgres" + let binDir = takeDirectory pgPostgres + pgInitdb <- findInBinOrPath binDir "initdb" + pgCreatedb <- findInBinOrPath binDir "createdb" + pgPsql <- findInBinOrPath binDir "psql" + pure PgTools { pgInitdb, pgPostgres, pgCreatedb, pgPsql } + +findInBinOrPath :: FilePath -> String -> IO FilePath +findInBinOrPath binDir name = do + let candidate = binDir name + exists <- doesFileExist candidate + if exists then pure candidate else requireExecutable name + +requireExecutable :: String -> IO FilePath +requireExecutable name = + findExecutable name >>= \case + Just path -> pure path + Nothing -> fail ("typedSql: bootstrap requires '" <> name <> "' in PATH") + +waitUntilReady :: Handle -> IO () +waitUntilReady handle = do + done <- 
hIsEOF handle + if done + then fail "typedSql: postgres exited before it was ready" + else do + line <- BS8.hGetLine handle + if "database system is ready to accept connections" `BS8.isInfixOf` line + then pure () + else waitUntilReady handle -- Describe a statement by asking a real Postgres server. describeStatement :: BS.ByteString -> IO DescribeResult describeStatement sql = do dbUrl <- defaultDatabaseUrl -- read database URL + describeStatementWith dbUrl sql + +describeStatementWith :: BS.ByteString -> BS.ByteString -> IO DescribeResult +describeStatementWith dbUrl sql = do conn <- PQ.connectdb dbUrl -- open libpq connection status <- PQ.status conn -- check connection state unless (status == PQ.ConnectionOk) do @@ -262,13 +424,24 @@ describeStatement sql = do pgConn <- PG.connectPostgreSQL dbUrl -- open postgres-simple connection for catalog queries tables <- loadTableMeta pgConn (Set.toList tableOids) -- load table metadata + let referencedOids = + tables + |> Map.elems + |> foldl' + (\acc TableMeta { tmForeignKeys } -> + acc <> Set.fromList (Map.elems tmForeignKeys) + ) + mempty + let missingRefs = referencedOids `Set.difference` Map.keysSet tables + extraTables <- loadTableMeta pgConn (Set.toList missingRefs) + let tables' = tables <> extraTables types <- loadTypeInfo pgConn (Set.toList typeOids) -- load type metadata PG.close pgConn -- close postgres-simple connection _ <- PQ.exec conn ("DEALLOCATE " <> statementName) -- release prepared statement PQ.finish conn -- close libpq connection - pure DescribeResult { drParams = paramTypes, drColumns = columns, drTables = tables, drTypes = types } -- return full metadata + pure DescribeResult { drParams = paramTypes, drColumns = columns, drTables = tables', drTypes = types } -- return full metadata -- Ensure libpq returned a successful result. 
ensureOk :: String -> Maybe PQ.Result -> IO PQ.Result @@ -389,216 +562,6 @@ loadTypeInfo conn typeOids = do extras <- loadTypeInfo conn missing -- recursively load missing element types pure (typeMap <> extras) -- merge base and extra type info --- Stub metadata ------------------------------------------------------- - --- Cache mapping stub file path to parsed SQL metadata. -type StubCache = Map.Map FilePath (Map.Map Text DescribeResult) - -{-# NOINLINE describeStubCache #-} -- ensure the IORef is shared and not duplicated --- Global cache for stub metadata (safe because file contents are immutable). -describeStubCache :: IORef StubCache -describeStubCache = unsafePerformIO (newIORef mempty) -- initialize to empty - --- Load stub entries from cache or parse if missing. -loadStubEntries :: FilePath -> IO (Map.Map Text DescribeResult) -loadStubEntries path = do - cache <- readIORef describeStubCache -- read current cache - case Map.lookup path cache of - Just entries -> pure entries -- cache hit - Nothing -> do - entries <- parseStubFile path -- parse the stub file - atomicModifyIORef' describeStubCache (\m -> (Map.insert path entries m, ())) -- store in cache - pure entries - --- Parse a stub file on disk into DescribeResult values. -parseStubFile :: FilePath -> IO (Map.Map Text DescribeResult) -parseStubFile path = do - bytes <- BS.readFile path -- read JSON file - stubFile <- either (fail . ("typedSql: failed to parse stub file: " <>) ) pure (Aeson.eitherDecodeStrict' bytes :: Either String DescribeStubFile) -- decode JSON - pure (buildStubEntries stubFile) -- build lookup map - --- Convert a DescribeStubFile into a map keyed by SQL text. -buildStubEntries :: DescribeStubFile -> Map.Map Text DescribeResult -buildStubEntries DescribeStubFile { stubFileQueries } = - foldl' - (\acc entry -> - Map.insert (stubEntrySql entry) (stubEntryToDescribe entry) acc - ) - mempty - stubFileQueries - --- Top-level JSON structure for stub metadata. 
-data DescribeStubFile = DescribeStubFile - { stubFileQueries :: ![DescribeStubEntry] -- list of stubbed statements - } - -instance Aeson.FromJSON DescribeStubFile where - parseJSON = Aeson.withObject "DescribeStubFile" \obj -> - DescribeStubFile <$> obj .:? "queries" .!= [] -- default to empty list - --- One stubbed SQL statement entry. -data DescribeStubEntry = DescribeStubEntry - { stubEntrySql :: !Text -- SQL text used as lookup key - , stubEntryParams :: ![Word32] -- parameter type OIDs - , stubEntryColumns :: ![DescribeStubColumn] -- result columns - , stubEntryTables :: ![DescribeStubTable] -- table metadata - , stubEntryTypes :: ![DescribeStubType] -- type metadata - } - -instance Aeson.FromJSON DescribeStubEntry where - parseJSON = Aeson.withObject "DescribeStubEntry" \obj -> - DescribeStubEntry - <$> obj .: "sql" -- required SQL string - <*> obj .:? "params" .!= [] -- optional params list - <*> obj .:? "columns" .!= [] -- optional columns list - <*> obj .:? "tables" .!= [] -- optional tables list - <*> obj .:? "types" .!= [] -- optional types list - --- Column description inside a stub entry. -data DescribeStubColumn = DescribeStubColumn - { stubColumnName :: !Text -- column name - , stubColumnType :: !Word32 -- type OID - , stubColumnTable :: !Word32 -- table OID (0 if none) - , stubColumnAttnum :: !(Maybe Int) -- attribute number, if known - } - -instance Aeson.FromJSON DescribeStubColumn where - parseJSON = Aeson.withObject "DescribeStubColumn" \obj -> - DescribeStubColumn - <$> obj .: "name" -- required name - <*> obj .: "typeOid" -- required type OID - <*> obj .:? "tableOid" .!= 0 -- default to 0 (no table) - <*> obj .:? "attnum" -- optional attnum - --- Table description inside a stub entry. 
-data DescribeStubTable = DescribeStubTable - { stubTableOid :: !Word32 -- table OID - , stubTableName :: !Text -- table name - , stubTableColumns :: ![DescribeStubTableColumn] -- table columns - , stubTablePrimaryKeys :: ![Int] -- primary key attnums - , stubTableForeignKeys :: ![DescribeStubForeignKey] -- foreign keys - } - -instance Aeson.FromJSON DescribeStubTable where - parseJSON = Aeson.withObject "DescribeStubTable" \obj -> - DescribeStubTable - <$> obj .: "oid" -- required OID - <*> obj .: "name" -- required table name - <*> obj .:? "columns" .!= [] -- optional columns list - <*> obj .:? "primaryKeys" .!= [] -- optional PK list - <*> obj .:? "foreignKeys" .!= [] -- optional FK list - --- Column description inside a stubbed table. -data DescribeStubTableColumn = DescribeStubTableColumn - { stubTableColumnAttnum :: !Int -- attribute number in table - , stubTableColumnName :: !Text -- column name - , stubTableColumnType :: !Word32 -- type OID - , stubTableColumnNotNull :: !Bool -- NOT NULL flag - } - -instance Aeson.FromJSON DescribeStubTableColumn where - parseJSON = Aeson.withObject "DescribeStubTableColumn" \obj -> - DescribeStubTableColumn - <$> obj .: "attnum" -- required attribute number - <*> obj .: "name" -- required column name - <*> obj .: "typeOid" -- required type OID - <*> obj .:? "notNull" .!= False -- default false for nullable - --- Foreign key description inside a stubbed table. -data DescribeStubForeignKey = DescribeStubForeignKey - { stubForeignKeyAttnum :: !Int -- local column attnum - , stubForeignKeyReferences :: !Word32 -- referenced table OID - } - -instance Aeson.FromJSON DescribeStubForeignKey where - parseJSON = Aeson.withObject "DescribeStubForeignKey" \obj -> - DescribeStubForeignKey - <$> obj .: "attnum" -- required local attnum - <*> obj .: "references" -- required referenced table OID - --- Type description inside a stub entry. 
-data DescribeStubType = DescribeStubType - { stubTypeOid :: !Word32 -- type OID - , stubTypeName :: !Text -- type name - , stubTypeElemOid :: !(Maybe Word32) -- element type OID for arrays - , stubTypeTyptype :: !(Maybe Text) -- typtype as text - , stubTypeNamespace :: !(Maybe Text) -- namespace name - } - -instance Aeson.FromJSON DescribeStubType where - parseJSON = Aeson.withObject "DescribeStubType" \obj -> - DescribeStubType - <$> obj .: "oid" -- required OID - <*> obj .: "name" -- required type name - <*> obj .:? "elemOid" -- optional element OID - <*> obj .:? "typtype" -- optional typtype - <*> obj .:? "namespace" -- optional namespace - --- Convert a stub entry into a DescribeResult. -stubEntryToDescribe :: DescribeStubEntry -> DescribeResult -stubEntryToDescribe DescribeStubEntry { .. } = - DescribeResult - { drParams = map oidFromWord stubEntryParams -- convert param OIDs - , drColumns = map stubColumnToDescribe stubEntryColumns -- convert columns - , drTables = Map.fromList (map stubTableToDescribe stubEntryTables) -- convert tables - , drTypes = Map.fromList (map stubTypeToDescribe stubEntryTypes) -- convert types - } - --- Convert a stub column into a DescribeColumn. -stubColumnToDescribe :: DescribeStubColumn -> DescribeColumn -stubColumnToDescribe DescribeStubColumn { .. } = - DescribeColumn - { dcName = CS.cs stubColumnName -- convert Text to ByteString - , dcType = oidFromWord stubColumnType -- convert type OID - , dcTable = oidFromWord stubColumnTable -- convert table OID - , dcAttnum = stubColumnAttnum -- keep attribute number - } - --- Convert a stub table into TableMeta. -stubTableToDescribe :: DescribeStubTable -> (PQ.Oid, TableMeta) -stubTableToDescribe DescribeStubTable { .. 
} = - ( tableOid - , TableMeta - { tmOid = tableOid - , tmName = stubTableName - , tmColumns = Map.fromList (map (\col -> (stubTableColumnAttnum col, stubColumnMeta col)) stubTableColumns) - , tmColumnOrder = map stubTableColumnAttnum stubTableColumns - , tmPrimaryKeys = Set.fromList stubTablePrimaryKeys - , tmForeignKeys = Map.fromList (map (\fk -> (stubForeignKeyAttnum fk, oidFromWord (stubForeignKeyReferences fk))) stubTableForeignKeys) - } - ) - where - tableOid = oidFromWord stubTableOid -- convert table OID - stubColumnMeta DescribeStubTableColumn { .. } = ColumnMeta - { cmAttnum = stubTableColumnAttnum - , cmName = stubTableColumnName - , cmTypeOid = oidFromWord stubTableColumnType - , cmNotNull = stubTableColumnNotNull - } - --- Convert a stub type into PgTypeInfo. -stubTypeToDescribe :: DescribeStubType -> (PQ.Oid, PgTypeInfo) -stubTypeToDescribe DescribeStubType { .. } = - ( oid - , PgTypeInfo - { ptiOid = oid - , ptiName = stubTypeName - , ptiElem = stubTypeElemOid >>= nonZeroOid - , ptiType = stubTypeTyptype >>= extractTyptype - , ptiNamespace = stubTypeNamespace - } - ) - where - oid = oidFromWord stubTypeOid -- convert type OID - extractTyptype text = case CS.cs text :: String of - (c:_) -> Just c -- take the first character - _ -> Nothing -- empty or invalid typtype - nonZeroOid w = if w == 0 then Nothing else Just (oidFromWord w) -- ignore 0 sentinel - --- Convert a Word32 from JSON into a libpq Oid. -oidFromWord :: Word32 -> PQ.Oid -oidFromWord = PQ.Oid . fromIntegral -- convert numeric width - -- | Build the Haskell type for a parameter, based on its OID. -- High-level: map a PG type OID into a TH Type. 
hsTypeForParam :: Map.Map PQ.Oid PgTypeInfo -> PQ.Oid -> TH.TypeQ diff --git a/ihp/Test/Test/TypedSqlSchema.sql b/ihp/Test/Test/TypedSqlSchema.sql new file mode 100644 index 000000000..542c02de7 --- /dev/null +++ b/ihp/Test/Test/TypedSqlSchema.sql @@ -0,0 +1,11 @@ +CREATE TABLE users ( + id uuid PRIMARY KEY, + name text NOT NULL, + email text NOT NULL +); + +CREATE TABLE posts ( + id uuid PRIMARY KEY, + author_id uuid REFERENCES users(id), + slug text NOT NULL +); diff --git a/ihp/Test/Test/TypedSqlSpec.hs b/ihp/Test/Test/TypedSqlSpec.hs index c55899ea0..e781783e6 100644 --- a/ihp/Test/Test/TypedSqlSpec.hs +++ b/ihp/Test/Test/TypedSqlSpec.hs @@ -11,14 +11,17 @@ import IHP.ModelSupport import IHP.Prelude import IHP.TypedSql import qualified Language.Haskell.TH.Syntax as TH +import System.Directory (canonicalizePath) import System.Environment (setEnv) import System.FilePath (takeDirectory, ()) import Test.Hspec $(do loc <- TH.location - let stubPath = takeDirectory (TH.loc_filename loc) "TypedSqlStub.json" - TH.runIO (setEnv "IHP_TYPED_SQL_STUB" stubPath) + let baseDir = takeDirectory (TH.loc_filename loc) + schemaPath <- TH.runIO (canonicalizePath (baseDir "TypedSqlSchema.sql")) + TH.runIO (setEnv "IHP_TYPED_SQL_BOOTSTRAP" "1") + TH.runIO (setEnv "IHP_TYPED_SQL_SCHEMA" schemaPath) pure [] ) diff --git a/ihp/Test/Test/TypedSqlStub.json b/ihp/Test/Test/TypedSqlStub.json deleted file mode 100644 index f85625a59..000000000 --- a/ihp/Test/Test/TypedSqlStub.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "queries": [ - { - "sql": "SELECT users.id, users.name FROM users WHERE users.id = $1", - "params": [2950], - "columns": [ - {"name": "id", "typeOid": 2950, "tableOid": 100000, "attnum": 1}, - {"name": "name", "typeOid": 25, "tableOid": 100000, "attnum": 2} - ], - "tables": [ - { - "oid": 100000, - "name": "users", - "columns": [ - {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, - {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true}, - {"attnum": 3, 
"name": "email", "typeOid": 25, "notNull": true} - ], - "primaryKeys": [1], - "foreignKeys": [] - } - ], - "types": [ - {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"}, - {"oid": 25, "name": "text", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} - ] - }, - { - "sql": "SELECT posts.author_id FROM posts WHERE posts.slug = $1", - "params": [25], - "columns": [ - {"name": "author_id", "typeOid": 2950, "tableOid": 100001, "attnum": 3} - ], - "tables": [ - { - "oid": 100001, - "name": "posts", - "columns": [ - {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, - {"attnum": 2, "name": "title", "typeOid": 25, "notNull": true}, - {"attnum": 3, "name": "author_id", "typeOid": 2950, "notNull": false} - ], - "primaryKeys": [1], - "foreignKeys": [ - {"attnum": 3, "references": 100000} - ] - }, - { - "oid": 100000, - "name": "users", - "columns": [ - {"attnum": 1, "name": "id", "typeOid": 2950, "notNull": true}, - {"attnum": 2, "name": "name", "typeOid": 25, "notNull": true}, - {"attnum": 3, "name": "email", "typeOid": 25, "notNull": true} - ], - "primaryKeys": [1], - "foreignKeys": [] - } - ], - "types": [ - {"oid": 2950, "name": "uuid", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"}, - {"oid": 25, "name": "text", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} - ] - }, - { - "sql": "SELECT COUNT(*) FROM posts", - "params": [], - "columns": [ - {"name": "count", "typeOid": 20, "tableOid": 0, "attnum": null} - ], - "tables": [], - "types": [ - {"oid": 20, "name": "int8", "elemOid": null, "typtype": "b", "namespace": "pg_catalog"} - ] - } - ] -} diff --git a/ihp/ihp.cabal b/ihp/ihp.cabal index 4114cf937..db041f6ab 100644 --- a/ihp/ihp.cabal +++ b/ihp/ihp.cabal @@ -38,7 +38,9 @@ data-files: , static/vendor/src-min/*.js , static/vendor/src-min/snippets/*.js -extra-source-files: CHANGELOG.md +extra-source-files: + CHANGELOG.md + Test/Test/TypedSqlSchema.sql common shared-properties 
default-language: GHC2021 @@ -48,6 +50,7 @@ common shared-properties , mono-traversable , transformers , directory + , filepath , string-conversions , warp , warp-systemd From ac6f97273fae6b98c0b6a21cc9909781c0c3e33a Mon Sep 17 00:00:00 2001 From: vcombey Date: Thu, 1 Jan 2026 18:15:29 +0100 Subject: [PATCH 05/13] try fix --- NixSupport/overlay.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NixSupport/overlay.nix b/NixSupport/overlay.nix index 3e66af016..0e13cd08b 100644 --- a/NixSupport/overlay.nix +++ b/NixSupport/overlay.nix @@ -7,7 +7,7 @@ final: prev: { overrides = self: super: let filter = inputs.nix-filter.lib; - localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") (filter.matchExt "json") filter.isDirectory "LICENSE" "data" ]; }) {}; + localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") (filter.matchExt "json") (filter.matchExt "sql") filter.isDirectory "LICENSE" "data" ]; }) {}; in { ihp = localPackage "ihp"; ihp-ide = localPackage "ihp-ide"; From b4cec272e9982414adb434f14caa2d64c1b8173d Mon Sep 17 00:00:00 2001 From: vcombey Date: Thu, 1 Jan 2026 20:24:12 +0100 Subject: [PATCH 06/13] remove test --- NixSupport/overlay.nix | 2 +- ihp/Test/Test/Main.hs | 1 - ihp/Test/Test/TypedSqlSchema.sql | 11 ------- ihp/Test/Test/TypedSqlSpec.hs | 54 -------------------------------- ihp/ihp.cabal | 1 - 5 files changed, 1 insertion(+), 68 deletions(-) delete mode 100644 ihp/Test/Test/TypedSqlSchema.sql delete mode 100644 ihp/Test/Test/TypedSqlSpec.hs diff --git a/NixSupport/overlay.nix b/NixSupport/overlay.nix index 0e13cd08b..3e66af016 100644 --- a/NixSupport/overlay.nix +++ b/NixSupport/overlay.nix @@ -7,7 +7,7 @@ final: prev: { overrides = self: super: let filter = inputs.nix-filter.lib; - 
localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") (filter.matchExt "json") (filter.matchExt "sql") filter.isDirectory "LICENSE" "data" ]; }) {}; + localPackage = name: super.callCabal2nix name (filter { root = "${toString flakeRoot}/${name}"; include = [ (filter.matchExt "hs") (filter.matchExt "cabal") (filter.matchExt "md") (filter.matchExt "json") filter.isDirectory "LICENSE" "data" ]; }) {}; in { ihp = localPackage "ihp"; ihp-ide = localPackage "ihp-ide"; diff --git a/ihp/Test/Test/Main.hs b/ihp/Test/Test/Main.hs index 14399bfe0..c93fd121d 100644 --- a/ihp/Test/Test/Main.hs +++ b/ihp/Test/Test/Main.hs @@ -15,7 +15,6 @@ import qualified Test.NameSupportSpec import qualified Test.PGListenerSpec import qualified Test.QueryBuilderSpec import qualified Test.RouterSupportSpec -import qualified Test.TypedSqlSpec import qualified Test.ValidationSupport.ValidateFieldSpec import qualified Test.View.CSSFrameworkSpec import qualified Test.View.FormSpec diff --git a/ihp/Test/Test/TypedSqlSchema.sql b/ihp/Test/Test/TypedSqlSchema.sql deleted file mode 100644 index 542c02de7..000000000 --- a/ihp/Test/Test/TypedSqlSchema.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE users ( - id uuid PRIMARY KEY, - name text NOT NULL, - email text NOT NULL -); - -CREATE TABLE posts ( - id uuid PRIMARY KEY, - author_id uuid REFERENCES users(id), - slug text NOT NULL -); diff --git a/ihp/Test/Test/TypedSqlSpec.hs b/ihp/Test/Test/TypedSqlSpec.hs deleted file mode 100644 index e781783e6..000000000 --- a/ihp/Test/Test/TypedSqlSpec.hs +++ /dev/null @@ -1,54 +0,0 @@ -{-# LANGUAGE QuasiQuotes #-} -{-# LANGUAGE TemplateHaskell #-} -{-| -Module: Test.TypedSqlSpec --} -module Test.TypedSqlSpec where - -import qualified Data.ByteString.Char8 as ByteString -import Database.PostgreSQL.Simple.Types (Query (..)) -import IHP.ModelSupport -import IHP.Prelude -import IHP.TypedSql 
-import qualified Language.Haskell.TH.Syntax as TH -import System.Directory (canonicalizePath) -import System.Environment (setEnv) -import System.FilePath (takeDirectory, ()) -import Test.Hspec - -$(do - loc <- TH.location - let baseDir = takeDirectory (TH.loc_filename loc) - schemaPath <- TH.runIO (canonicalizePath (baseDir "TypedSqlSchema.sql")) - TH.runIO (setEnv "IHP_TYPED_SQL_BOOTSTRAP" "1") - TH.runIO (setEnv "IHP_TYPED_SQL_SCHEMA" schemaPath) - pure [] - ) - --- Define primary keys for the tables referenced in the stub metadata - -type instance PrimaryKey "users" = UUID -type instance PrimaryKey "posts" = UUID - -tests :: Spec -tests = describe "TypedSql" do - it "infers Id and Text columns" do - let userId :: UUID - userId = "11111111-1111-1111-1111-111111111111" - typed :: TypedQuery (Id' "users", Text) - typed = [typedSql|SELECT users.id, users.name FROM users WHERE users.id = ${userId}|] - Query sqlBytes = tqQuery typed - sqlBytes `shouldBe` ByteString.pack "SELECT users.id, users.name FROM users WHERE users.id = $1" - - it "maps nullable foreign keys to Maybe Id'" do - let slug :: Text - slug = "hello-world" - typed :: TypedQuery (Maybe (Id' "users")) - typed = [typedSql|SELECT posts.author_id FROM posts WHERE posts.slug = ${slug}|] - length (tqParams typed) `shouldBe` 1 - - it "infers aggregate columns" do - let typed :: TypedQuery (Maybe Integer) - typed = [typedSql|SELECT COUNT(*) FROM posts|] - Query sqlBytes = tqQuery typed - sqlBytes `shouldBe` ByteString.pack "SELECT COUNT(*) FROM posts" diff --git a/ihp/ihp.cabal b/ihp/ihp.cabal index db041f6ab..9312f79ae 100644 --- a/ihp/ihp.cabal +++ b/ihp/ihp.cabal @@ -40,7 +40,6 @@ data-files: , static/vendor/src-min/snippets/*.js extra-source-files: CHANGELOG.md - Test/Test/TypedSqlSchema.sql common shared-properties default-language: GHC2021 From 95bfa7a62f5a84c36d250ccd5c41141be1eb994d Mon Sep 17 00:00:00 2001 From: vcombey Date: Thu, 1 Jan 2026 21:17:31 +0100 Subject: [PATCH 07/13] fix --- ihp/ihp.cabal 
| 1 - 1 file changed, 1 deletion(-) diff --git a/ihp/ihp.cabal b/ihp/ihp.cabal index 9312f79ae..71b141865 100644 --- a/ihp/ihp.cabal +++ b/ihp/ihp.cabal @@ -275,4 +275,3 @@ test-suite tests Test.Controller.CookieSpec Test.PGListenerSpec Test.AutoRefreshSpec - Test.TypedSqlSpec From 67f4b7562d5c4c878b553316423a1c916deb0008 Mon Sep 17 00:00:00 2001 From: vcombey Date: Fri, 2 Jan 2026 12:31:24 +0100 Subject: [PATCH 08/13] fix expr inside pgtyped --- ihp/IHP/TypedSql.hs | 68 +++++++++++++++++++++++++++++---------------- ihp/ihp.cabal | 1 + 2 files changed, 45 insertions(+), 24 deletions(-) diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index 4d59b5b07..3e68e519a 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -169,17 +169,17 @@ toPQOid (PG.Oid w) = PQ.Oid w -- low-level: wrap the same numeric value -- Build the TH expression for a typed SQL quasiquote. typedSqlExp :: String -> TH.ExpQ typedSqlExp rawSql = do - -- Replace ${expr} placeholders with $1, $2, ... and collect expressions. - let (sqlText, placeholderExprs) = substitutePlaceholders rawSql - parsedExprs <- mapM parseExpr placeholderExprs -- parse each placeholder as Haskell code + -- Replace ${expr} placeholders with $1, $2, ... for describe and ? for runtime. 
+ let PlaceholderPlan { ppDescribeSql, ppRuntimeSql, ppExprs } = planPlaceholders rawSql + parsedExprs <- mapM parseExpr ppExprs -- parse each placeholder as Haskell code bootstrapEnv <- TH.runIO (lookupEnv "IHP_TYPED_SQL_BOOTSTRAP") -- optional bootstrap mode loc <- TH.location let useBootstrap = isBootstrapEnabled bootstrapEnv describeResult <- TH.runIO $ if useBootstrap - then describeUsingBootstrap (TH.loc_filename loc) sqlText -- bootstrap DB from schema - else describeStatement (CS.cs sqlText) -- online: ask Postgres to describe + then describeUsingBootstrap (TH.loc_filename loc) ppDescribeSql -- bootstrap DB from schema + else describeStatement (CS.cs ppDescribeSql) -- online: ask Postgres to describe let DescribeResult { drParams, drColumns, drTables, drTypes } = describeResult -- unpack metadata when (length drParams /= length parsedExprs) $ -- make sure counts match @@ -192,28 +192,48 @@ typedSqlExp rawSql = do resultType <- hsTypeForColumns drTypes drTables drColumns -- compute result type from columns - let sqlLiteral = TH.SigE (TH.LitE (TH.StringL sqlText)) (TH.ConT ''String) + let sqlLiteral = TH.SigE (TH.LitE (TH.StringL ppRuntimeSql)) (TH.ConT ''String) queryExpr = TH.AppE (TH.ConE 'PG.Query) (TH.AppE (TH.VarE 'CS.cs) sqlLiteral) rowParserExpr = if length drColumns == 1 then TH.VarE 'PGFR.field else TH.VarE 'PGFR.fromRow typedQueryExpr = - TH.RecConE - 'TypedQuery - [ (TH.mkName "tqQuery", queryExpr) -- build query text - , (TH.mkName "tqParams", TH.ListE (map (TH.AppE (TH.VarE 'PGTF.toField)) annotatedParams)) -- encode params - , (TH.mkName "tqRowParser", rowParserExpr) -- parse single column or full row - ] + TH.AppE + (TH.AppE + (TH.AppE + (TH.ConE 'TypedQuery) + queryExpr + ) + (TH.ListE (map (TH.AppE (TH.VarE 'PGTF.toField)) annotatedParams)) + ) + rowParserExpr pure (TH.SigE typedQueryExpr (TH.AppT (TH.ConT ''TypedQuery) resultType)) -- add overall type signature --- | Replace ${expr} placeholders with PostgreSQL-style $1 placeholders. 
--- High-level: turns a templated SQL string into a PG-ready SQL string plus expr list. -substitutePlaceholders :: String -> (String, [String]) -substitutePlaceholders = go 1 "" [] where - go _ acc exprs [] = (reverse acc, reverse exprs) -- done: reverse accumulators - go n acc exprs ('$':'{':rest) = +data PlaceholderPlan = PlaceholderPlan + { ppDescribeSql :: !String + , ppRuntimeSql :: !String + , ppExprs :: ![String] + } + +-- | Replace ${expr} placeholders with PostgreSQL-style $1 for describe and ? for runtime. +-- High-level: turns a templated SQL string into SQL strings plus expr list. +planPlaceholders :: String -> PlaceholderPlan +planPlaceholders = go 1 "" "" [] where + go _ accDescribe accRuntime exprs [] = + PlaceholderPlan + { ppDescribeSql = reverse accDescribe + , ppRuntimeSql = reverse accRuntime + , ppExprs = reverse exprs + } + go n accDescribe accRuntime exprs ('$':'{':rest) = let (expr, after) = breakOnClosing 0 "" rest -- parse until matching } - in go (n + 1) (reverse ('$' : CS.cs (show n)) <> acc) (expr : exprs) after -- replace with $n - go n acc exprs (c:rest) = go n (c : acc) exprs rest -- copy non-placeholder chars + describeToken = reverse ('$' : CS.cs (show n)) + in go (n + 1) + (describeToken <> accDescribe) + ('?' : accRuntime) + (expr : exprs) + after + go n accDescribe accRuntime exprs (c:rest) = + go n (c : accDescribe) (c : accRuntime) exprs rest breakOnClosing depth acc [] = (reverse acc, []) -- no closing brace found breakOnClosing depth acc ('{':xs) = breakOnClosing (depth + 1) ('{':acc) xs -- nested { increases depth @@ -575,7 +595,7 @@ hsTypeForColumns :: Map.Map PQ.Oid PgTypeInfo -> Map.Map PQ.Oid TableMeta -> [De hsTypeForColumns typeInfo tables cols = do case detectFullTable tables cols of Just tableName -> - pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." 
<> tableNameToModelName tableName)))) -- use model type + pure (TH.ConT (TH.mkName (CS.cs (tableNameToModelName tableName)))) -- use model type Nothing -> do hsCols <- mapM (hsTypeForColumn typeInfo tables) cols -- map each column to a type case hsCols of @@ -662,8 +682,8 @@ hsTypeForPg typeInfo nullable PgTypeInfo { ptiName, ptiElem, ptiType } = do _ | ptiName == "tsvector" -> pure (TH.ConT ''PGTs.TSVector) -- full-text search vector _ | ptiName == "interval" -> pure (TH.ConT ''PGTime.PGInterval) -- interval type _ | ptiType == Just 'e' -> - pure (TH.ConT (TH.mkName (CS.cs ("Generated.Enums." <> tableNameToModelName ptiName)))) -- enum type + pure (TH.ConT (TH.mkName (CS.cs (tableNameToModelName ptiName)))) -- enum type _ | ptiType == Just 'c' -> - pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." <> tableNameToModelName ptiName)))) -- composite type - _ -> pure (TH.ConT (TH.mkName (CS.cs ("Generated.ActualTypes." <> tableNameToModelName ptiName)))) -- fallback to generated type + pure (TH.ConT (TH.mkName (CS.cs (tableNameToModelName ptiName)))) -- composite type + _ -> pure (TH.ConT (TH.mkName (CS.cs (tableNameToModelName ptiName)))) -- fallback to generated type pure (wrapNull nullable base) -- apply nullability wrapper diff --git a/ihp/ihp.cabal b/ihp/ihp.cabal index 71b141865..c8feeb7fa 100644 --- a/ihp/ihp.cabal +++ b/ihp/ihp.cabal @@ -201,6 +201,7 @@ library , IHP.ModelSupport , IHP.NameSupport , IHP.QueryBuilder + , IHP.Postgres.Typed , IHP.TypedSql , IHP.Fetch , IHP.RouterPrelude From 454acf0b1844fb77b2ca1ca078d7e1fac7b82029 Mon Sep 17 00:00:00 2001 From: vcombey Date: Fri, 2 Jan 2026 17:10:54 +0100 Subject: [PATCH 09/13] fix composite query --- Guide/typed-sql.markdown | 19 ++++++++++++------- ihp/IHP/TypedSql.hs | 21 ++++++++++++++++++--- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/Guide/typed-sql.markdown b/Guide/typed-sql.markdown index d1c47c894..8db16ac2b 100644 --- a/Guide/typed-sql.markdown +++ b/Guide/typed-sql.markdown 
@@ -155,8 +155,12 @@ The mapping follows IHP's conventions. Summary of common types: - `polygon` -> `Polygon` - `inet` -> `IP` - `tsvector` -> `TSVector` -- enums -> `Generated.Enums.` -- composite types -> `Generated.ActualTypes.` +- enums -> `` (re-exported from `Generated.Types`) +- composite types -> `` (re-exported from `Generated.Types`) + +Single-column composite selects (e.g. `SELECT my_table FROM my_table`) are +not supported because `postgresql-simple` cannot decode composite fields +into record types. Use `SELECT my_table.*` or list columns explicitly. If you have custom types, add a `FromField` instance and extend `hsTypeForPg` in `IHP.TypedSql`. @@ -177,8 +181,9 @@ There is no separate runtime connection layer. looks like this: 1. **Placeholder rewrite**: the SQL template is scanned for `${expr}` - placeholders. Each placeholder is replaced by `$1`, `$2`, ... and the - captured expressions are parsed as Haskell AST. + placeholders. Each placeholder is replaced by `$1`, `$2`, ... for the + compile-time describe and by `?` for runtime execution. The captured + expressions are parsed as Haskell AST. 2. **Statement describe**: at compile time, `typedSql` prepares the query and runs `DESCRIBE` via libpq. This returns: - parameter OIDs (types for each `$N`) @@ -192,10 +197,10 @@ looks like this: 4. **IHP type mapping**: - Primary keys become `Id' "table"`. - Single-column foreign keys become `Id' "ref_table"`. - - Enums map to `Generated.Enums.`. - - Composite types map to `Generated.ActualTypes.`. + - Enums map to `` (re-exported from `Generated.Types`). + - Composite types map to `` (re-exported from `Generated.Types`). - If the select list exactly matches `table.*` order, the result type - becomes the generated record type (`Generated.ActualTypes.`). + becomes the generated record type (`` from `Generated.Types`). 5. 
**TypedQuery generation**: the quasiquoter emits a `TypedQuery` value with: - `PG.Query` containing the rewritten SQL diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index 3e68e519a..adcb79406 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -13,7 +13,7 @@ module IHP.TypedSql ) where import Control.Exception (bracket_) -import Control.Monad (guard) +import Control.Monad (guard, when) import qualified Data.Aeson as Aeson import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 @@ -194,7 +194,18 @@ typedSqlExp rawSql = do let sqlLiteral = TH.SigE (TH.LitE (TH.StringL ppRuntimeSql)) (TH.ConT ''String) queryExpr = TH.AppE (TH.ConE 'PG.Query) (TH.AppE (TH.VarE 'CS.cs) sqlLiteral) - rowParserExpr = if length drColumns == 1 then TH.VarE 'PGFR.field else TH.VarE 'PGFR.fromRow + isCompositeColumn = + case drColumns of + [DescribeColumn { dcType }] -> + case Map.lookup dcType drTypes of + Just PgTypeInfo { ptiType = Just 'c' } -> True + _ -> False + _ -> False + when (length drColumns == 1 && isCompositeColumn) $ + fail + ("typedSql: composite columns must be expanded (use SELECT table.* " + <> "or list columns explicitly)") + let rowParserExpr = if length drColumns == 1 then TH.VarE 'PGFR.field else TH.VarE 'PGFR.fromRow typedQueryExpr = TH.AppE (TH.AppE @@ -434,7 +445,11 @@ describeStatementWith dbUrl sql = do tableOid <- PQ.ftable desc colIndex -- table OID for the column attnumRaw <- PQ.ftablecol desc colIndex -- attribute number in table let PQ.Col attnumCInt = attnumRaw - let attnum = if tableOid == PQ.Oid 0 then Nothing else Just (fromIntegral attnumCInt) -- ignore attnum when table is 0 + let attnumInt = fromIntegral attnumCInt :: Int + let attnum = + if tableOid == PQ.Oid 0 || attnumInt <= 0 + then Nothing -- no reliable column info for composites/expressions + else Just attnumInt pure DescribeColumn { dcName = name, dcType = colType, dcTable = tableOid, dcAttnum = attnum } -- build column meta ) [0 .. 
columnCountInt - 1] From 9328f3f09ef8123c5332c3745cf2595d0e771fda Mon Sep 17 00:00:00 2001 From: vcombey Date: Fri, 2 Jan 2026 19:00:19 +0100 Subject: [PATCH 10/13] coerce --- Guide/typed-sql.markdown | 6 ++++-- ihp/IHP/Postgres/Typed.hs | 22 ++++++++++++++++++++++ ihp/IHP/TypedSql.hs | 6 +++++- 3 files changed, 31 insertions(+), 3 deletions(-) create mode 100644 ihp/IHP/Postgres/Typed.hs diff --git a/Guide/typed-sql.markdown b/Guide/typed-sql.markdown index 8db16ac2b..ef0eb2922 100644 --- a/Guide/typed-sql.markdown +++ b/Guide/typed-sql.markdown @@ -107,8 +107,10 @@ runTyped [typedSql| Notes: - Do not use `?` or `$1` placeholders directly. -- Parameter types come from OIDs only, so UUID parameters are `UUID` (not - `Id'`). Use `get #id record` or `unId` if you want to pass an `Id'`. +- Parameter types come from OIDs only, so UUID parameters are typed as + `UUID`. Placeholders are coerced, so passing an `Id` works too. The + database does not report which table a parameter belongs to, so any + `Id` with a matching primary key type will typecheck. 
- Use explicit type annotations for ambiguous values: ```haskell diff --git a/ihp/IHP/Postgres/Typed.hs b/ihp/IHP/Postgres/Typed.hs new file mode 100644 index 000000000..eba13b130 --- /dev/null +++ b/ihp/IHP/Postgres/Typed.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE ImplicitParams #-} + +module IHP.Postgres.Typed + ( pgSQL + , pgQuery + , pgQueryOne + ) where + +import IHP.ModelSupport (ModelContext) +import IHP.Prelude +import IHP.TypedSql (TypedQuery, runTyped, runTypedOne, + typedSql) +import Language.Haskell.TH.Quote (QuasiQuoter) + +pgSQL :: QuasiQuoter +pgSQL = typedSql + +pgQuery :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] +pgQuery = runTyped + +pgQueryOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result +pgQueryOne = runTypedOne diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index adcb79406..bac220b8d 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -18,6 +18,7 @@ import qualified Data.Aeson as Aeson import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 import qualified Data.Char as Char +import Data.Coerce (coerce) import qualified Data.List as List import qualified Data.Map.Strict as Map import Data.Maybe (catMaybes, mapMaybe) @@ -188,7 +189,10 @@ typedSqlExp rawSql = do paramTypes <- mapM (hsTypeForParam drTypes) drParams -- map param OIDs to Haskell types let annotatedParams = - zipWith (\expr paramTy -> TH.SigE expr paramTy) parsedExprs paramTypes -- add type sigs to args + zipWith + (\expr paramTy -> TH.SigE (TH.AppE (TH.VarE 'coerce) expr) paramTy) + parsedExprs + paramTypes -- coerce placeholder expressions into expected param types resultType <- hsTypeForColumns drTypes drTables drColumns -- compute result type from columns From 723fb1921d462013b42326e0f54e1ed24e62589e Mon Sep 17 00:00:00 2001 From: vcombey Date: Sat, 3 Jan 2026 00:13:23 +0100 Subject: [PATCH 11/13] seems to work fine --- Guide/typed-sql.markdown | 49 +++--- ihp/IHP/Postgres/Typed.hs | 9 +- 
ihp/IHP/TypedSql.hs | 319 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 336 insertions(+), 41 deletions(-) diff --git a/Guide/typed-sql.markdown b/Guide/typed-sql.markdown index ef0eb2922..73202507f 100644 --- a/Guide/typed-sql.markdown +++ b/Guide/typed-sql.markdown @@ -33,7 +33,7 @@ Enable Template Haskell and QuasiQuotes in a module where you want to use {-# LANGUAGE DataKinds #-} ``` -Import the module and run the query using `runTyped`: +Import the module and run the query using `sqlQueryTyped`: ```haskell module Web.Controller.Users where @@ -44,7 +44,7 @@ import IHP.TypedSql indexAction :: ?modelContext => IO () indexAction = do let userId = "00000000-0000-0000-0000-000000000001" :: Id User - users <- runTyped [typedSql| + users <- sqlQueryTyped [typedSql| SELECT users.id, users.name FROM users WHERE users.id = ${userId} @@ -54,7 +54,7 @@ indexAction = do ``` `typedSql` produces a `TypedQuery` value that is executed using -`runTyped` or `runTypedOne`. +`sqlQueryTyped`. If you expect a single row, pattern match on the result list. ## Result type inference @@ -65,7 +65,7 @@ the schema (e.g. `SELECT users.*`), then the result is the generated record type: ```haskell -user :: User <- runTypedOne [typedSql| +[user] <- sqlQueryTyped [typedSql| SELECT users.* FROM users WHERE users.id = ${userId} |] ``` @@ -75,7 +75,7 @@ user :: User <- runTypedOne [typedSql| When you return a subset of columns, the result is a tuple: ```haskell -userInfo :: [(Id User, Text)] <- runTyped [typedSql| +userInfo :: [(Id User, Text)] <- sqlQueryTyped [typedSql| SELECT users.id, users.name FROM users |] ``` @@ -86,7 +86,7 @@ If a column is a single-column foreign key, `typedSql` maps it to `Id' "other_table"` automatically: ```haskell -authorIds :: [Maybe (Id User)] <- runTyped [typedSql| +authorIds :: [Maybe (Id User)] <- sqlQueryTyped [typedSql| SELECT posts.author_id FROM posts WHERE posts.slug = ${slug} |] ``` @@ -99,7 +99,7 @@ This follows IHP's usual `Id` mapping rules. 
parameter and is type-checked against the database. ```haskell -runTyped [typedSql| +sqlQueryTyped [typedSql| SELECT * FROM posts WHERE posts.id = ${postId} |] ``` @@ -107,14 +107,18 @@ runTyped [typedSql| Notes: - Do not use `?` or `$1` placeholders directly. -- Parameter types come from OIDs only, so UUID parameters are typed as - `UUID`. Placeholders are coerced, so passing an `Id` works too. The - database does not report which table a parameter belongs to, so any - `Id` with a matching primary key type will typecheck. +- Parameter types come from OIDs by default, so UUID parameters are typed + as `UUID`. Placeholders are coerced, so passing an `Id` works too. +- When a placeholder is compared to a table-qualified column + (e.g. `cv_setups.id = ${...}`) or to an unqualified column in a + single-table query, `typedSql` treats the parameter as the column's + Haskell type (including `Id` for PK/FK columns). This lets you pass + `Id CvSetup` or `UUID`, and rejects `Id` values from other tables. + Use explicit table/alias qualification for best results in joins. 
- Use explicit type annotations for ambiguous values: ```haskell -runTyped [typedSql| +sqlQueryTyped [typedSql| SELECT * FROM posts WHERE posts.score > ${10 :: Int} |] ``` @@ -131,7 +135,7 @@ If you want to force a non-null result, use SQL functions such as `COALESCE`: ```haskell -runTyped [typedSql| +sqlQueryTyped [typedSql| SELECT COALESCE(posts.title, '') FROM posts |] ``` @@ -169,7 +173,7 @@ If you have custom types, add a `FromField` instance and extend ## Runtime behavior -`runTyped` uses IHP's `ModelContext`, so it automatically: +`sqlQueryTyped` uses IHP's `ModelContext`, so it automatically: - uses the pooled connection - respects row-level security (RLS) @@ -209,7 +213,7 @@ looks like this: - `toField`-encoded parameters - a row parser (`field` for single column, `fromRow` for tuples) -At runtime, `runTyped` executes the generated query using IHP's +At runtime, `sqlQueryTyped` executes the generated query using IHP's `ModelContext`, so it reuses the same logging, RLS parameters, and connection pool as the rest of IHP. @@ -280,10 +284,19 @@ If you currently use `sqlQuery` for complex queries: 1. Wrap the query in `[typedSql| ... |]`. 2. Replace `?` placeholders with `${expr}`. 3. Replace custom `FromRow` with inferred tuples or records. -4. Use `runTyped` instead of `sqlQuery`. +4. Use `sqlQueryTyped` instead of `sqlQuery`. You get compile-time SQL validation with minimal changes. +If you currently use `sqlExec` for statements: + +1. Wrap the statement in `[typedSql| ... |]`. +2. Replace `?` placeholders with `${expr}`. +3. Use `sqlExecTyped` instead of `sqlExec`. + +If you want rows back from an `INSERT`/`UPDATE`/`DELETE`, add a `RETURNING` +clause and use `sqlQueryTyped`. + ## Troubleshooting **Error: could not connect to database** @@ -317,8 +330,8 @@ You get compile-time SQL validation with minimal changes. 
```haskell typedSql :: QuasiQuoter -runTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] -runTypedOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result +sqlQueryTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] +sqlExecTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO Int64 ``` See `IHP.TypedSql` for the full implementation. diff --git a/ihp/IHP/Postgres/Typed.hs b/ihp/IHP/Postgres/Typed.hs index eba13b130..d18eee514 100644 --- a/ihp/IHP/Postgres/Typed.hs +++ b/ihp/IHP/Postgres/Typed.hs @@ -3,20 +3,15 @@ module IHP.Postgres.Typed ( pgSQL , pgQuery - , pgQueryOne ) where import IHP.ModelSupport (ModelContext) import IHP.Prelude -import IHP.TypedSql (TypedQuery, runTyped, runTypedOne, - typedSql) +import IHP.TypedSql (TypedQuery, sqlQueryTyped, typedSql) import Language.Haskell.TH.Quote (QuasiQuoter) pgSQL :: QuasiQuoter pgSQL = typedSql pgQuery :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] -pgQuery = runTyped - -pgQueryOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result -pgQueryOne = runTypedOne +pgQuery = sqlQueryTyped diff --git a/ihp/IHP/TypedSql.hs b/ihp/IHP/TypedSql.hs index bac220b8d..36e245dd2 100644 --- a/ihp/IHP/TypedSql.hs +++ b/ihp/IHP/TypedSql.hs @@ -8,8 +8,8 @@ module IHP.TypedSql ( typedSql -- expose the SQL quasiquoter entry point , TypedQuery (..) 
-- expose the query container and its fields - , runTyped -- execute a typed query returning all rows - , runTypedOne -- execute a typed query expecting exactly one row + , sqlQueryTyped -- execute a typed query returning all rows + , sqlExecTyped -- execute a typed statement and return affected rows ) where import Control.Exception (bracket_) @@ -25,6 +25,7 @@ import Data.Maybe (catMaybes, mapMaybe) import Data.Scientific (Scientific) import qualified Data.Set as Set import qualified Data.String.Conversions as CS +import qualified Data.Text as Text import Data.Time (LocalTime, TimeOfDay, UTCTime) import Data.Time.Calendar (Day) @@ -79,8 +80,8 @@ instance PGTR.ToRow PreparedRow where -- | Run a typed query and return all rows. -- High-level: delegates to postgres-simple with logging and RLS params. -runTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] -runTyped TypedQuery { tqQuery, tqParams, tqRowParser } = +sqlQueryTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO [result] +sqlQueryTyped TypedQuery { tqQuery, tqParams, tqRowParser } = withDatabaseConnection \connection -> -- obtain a DB connection from the model context withRLSParams -- apply row-level security parameters if configured (\query params -> @@ -92,15 +93,20 @@ runTyped TypedQuery { tqQuery, tqParams, tqRowParser } = tqQuery -- the SQL to execute (PreparedRow tqParams) -- wrap params to match ToRow --- | Run a typed query that is expected to return a single row. --- High-level: enforces exactly-one-row semantics. 
-runTypedOne :: (?modelContext :: ModelContext) => TypedQuery result -> IO result -runTypedOne typed = do - rows <- runTyped typed -- execute query and collect rows - case rows of - [row] -> pure row -- success: exactly one row - [] -> error "runTypedOne: expected exactly one row but got none" -- error on zero rows - _ -> error ("runTypedOne: expected a single row but got " <> show (length rows)) -- error on too many rows +-- | Run a typed statement (INSERT/UPDATE/DELETE) and return affected row count. +-- High-level: mirrors 'sqlExec' but keeps typed SQL parameters. +sqlExecTyped :: (?modelContext :: ModelContext) => TypedQuery result -> IO Int64 +sqlExecTyped TypedQuery { tqQuery, tqParams } = + withDatabaseConnection \connection -> -- obtain a DB connection from the model context + withRLSParams -- apply row-level security parameters if configured + (\query params -> + measureTimeIfLogging "💾" connection -- measure/log query runtime with a label + (PG.execute connection query (PreparedRow params)) -- execute without returning rows + query -- log the SQL query + (PreparedRow params) -- log the query parameters + ) + tqQuery -- the SQL to execute + (PreparedRow tqParams) -- wrap params to match ToRow -- * Template Haskell quasiquoter @@ -188,9 +194,18 @@ typedSqlExp rawSql = do paramTypes <- mapM (hsTypeForParam drTypes) drParams -- map param OIDs to Haskell types + let sqlTokens = tokenizeSql ppDescribeSql + let aliasMap = buildAliasMap sqlTokens + let paramHints = collectParamHints sqlTokens aliasMap + paramHintTypes <- resolveParamHintTypes drTables drTypes paramHints + let annotatedParams = - zipWith - (\expr paramTy -> TH.SigE (TH.AppE (TH.VarE 'coerce) expr) paramTy) + zipWith3 + (\index expr paramTy -> + let expectedType = fromMaybe paramTy (Map.lookup index paramHintTypes) + in TH.SigE (TH.AppE (TH.VarE 'coerce) expr) expectedType + ) + [1..] 
parsedExprs paramTypes -- coerce placeholder expressions into expected param types @@ -209,7 +224,11 @@ typedSqlExp rawSql = do fail ("typedSql: composite columns must be expanded (use SELECT table.* " <> "or list columns explicitly)") - let rowParserExpr = if length drColumns == 1 then TH.VarE 'PGFR.field else TH.VarE 'PGFR.fromRow + let rowParserExpr = + case length drColumns of + 0 -> TH.AppE (TH.VarE 'pure) (TH.ConE '()) + 1 -> TH.VarE 'PGFR.field + _ -> TH.VarE 'PGFR.fromRow typedQueryExpr = TH.AppE (TH.AppE @@ -257,6 +276,274 @@ planPlaceholders = go 1 "" "" [] where | otherwise = breakOnClosing (depth - 1) ('}':acc) xs -- close a nested brace breakOnClosing depth acc (x:xs) = breakOnClosing depth (x:acc) xs -- accumulate placeholder chars +data SqlToken + = TokIdent !Text + | TokSymbol !Char + | TokParam !Int + deriving (Eq, Show) + +data ParamHint = ParamHint + { phIndex :: !Int + , phTable :: !Text + , phColumn :: !Text + , phArray :: !Bool + } + deriving (Eq, Show) + +tokenizeSql :: String -> [SqlToken] +tokenizeSql = go [] where + go acc [] = reverse acc + go acc ('-':'-':rest) = go acc (dropLineComment rest) + go acc ('/':'*':rest) = go acc (dropBlockComment rest) + go acc ('\'':rest) = go acc (dropStringLiteral rest) + go acc ('"':rest) = + let (ident, remaining) = parseQuotedIdent rest + in go (TokIdent ident : acc) remaining + go acc ('$':rest) = + let (digits, remaining) = span Char.isDigit rest + in if null digits + then go acc remaining + else go (TokParam (digitsToInt digits) : acc) remaining + go acc (c:rest) + | Char.isSpace c = go acc rest + | isIdentStart c = + let (identTail, remaining) = span isIdentChar rest + identText = Text.toLower (CS.cs (c : identTail)) + in go (TokIdent identText : acc) remaining + | isSymbolToken c = go (TokSymbol c : acc) rest + | otherwise = go acc rest + + isIdentStart ch = Char.isLetter ch || ch == '_' + isIdentChar ch = Char.isAlphaNum ch || ch == '_' || ch == '$' + isSymbolToken ch = ch `elem` ['.', '=', '(', 
')', ','] + + dropLineComment = dropWhile (/= '\n') + dropBlockComment = dropUntil "*/" + dropStringLiteral = dropSingleQuoted + + dropUntil _ [] = [] + dropUntil pattern@(p1:p2:_) (x:y:rest) + | x == p1 && y == p2 = rest + | otherwise = dropUntil pattern (y:rest) + dropUntil _ rest = rest + + dropSingleQuoted [] = [] + dropSingleQuoted ('\'':'\'':xs) = dropSingleQuoted xs + dropSingleQuoted ('\'':xs) = xs + dropSingleQuoted (_:xs) = dropSingleQuoted xs + + parseQuotedIdent = go "" where + go acc [] = (Text.toLower (CS.cs (reverse acc)), []) + go acc ('"':'"':xs) = go ('"':acc) xs + go acc ('"':xs) = (Text.toLower (CS.cs (reverse acc)), xs) + go acc (x:xs) = go (x:acc) xs + +digitsToInt :: String -> Int +digitsToInt = foldl' (\acc digit -> acc * 10 + Char.digitToInt digit) 0 + +tokenAtIndex :: [a] -> Int -> Maybe a +tokenAtIndex xs ix = + case List.drop ix xs of + (value:_) -> Just value + [] -> Nothing + +reservedKeywords :: Set.Set Text +reservedKeywords = + Set.fromList (map Text.pack + [ "as", "where", "join", "inner", "left", "right", "full", "cross" + , "on", "group", "order", "limit", "offset", "having", "union" + , "intersect", "except", "returning", "set", "values", "from", "update" + , "delete", "insert", "select" + ]) + +clauseKeywords :: Set.Set Text +clauseKeywords = Set.fromList (map Text.pack ["from", "join", "update", "into"]) + +buildAliasMap :: [SqlToken] -> Map.Map Text Text +buildAliasMap tokens = go tokens Map.empty where + go [] acc = acc + go (TokIdent keyword : rest) acc + | keyword `Set.member` clauseKeywords = + case parseTable rest of + Nothing -> go rest acc + Just (tableName, afterTable) -> + let (alias, afterAlias) = parseAlias afterTable + acc' = Map.insert tableName tableName acc + acc'' = maybe acc' (\name -> Map.insert name tableName acc') alias + in go afterAlias acc'' + | otherwise = go rest acc + go (_:rest) acc = go rest acc + + parseTable (TokIdent _schemaName : TokSymbol '.' 
: TokIdent tableName : rest) = + Just (tableName, rest) + parseTable (TokIdent tableName : rest) = + Just (tableName, rest) + parseTable _ = Nothing + + parseAlias (TokIdent aliasKeyword : TokIdent alias : rest) + | aliasKeyword == Text.pack "as" = (Just alias, rest) + parseAlias (TokIdent alias : rest) + | alias `Set.notMember` reservedKeywords = (Just alias, rest) + parseAlias rest = (Nothing, rest) + +collectParamHints :: [SqlToken] -> Map.Map Text Text -> Map.Map Int ParamHint +collectParamHints tokens aliasMap = + let defaultTable = singleTable aliasMap + in tokens + |> zip [0..] + |> mapMaybe (hintForToken aliasMap defaultTable) + |> foldl' mergeHints Map.empty + |> Map.mapMaybe id + where + tokenAt ix + | ix < 0 = Nothing + | otherwise = tokenAtIndex tokens ix + + singleTable aliases = + case Set.toList (Set.fromList (Map.elems aliases)) of + [table] -> Just table + _ -> Nothing + + hasDotBefore ix = + case tokenAt (ix - 1) of + Just (TokSymbol '.') -> True + _ -> False + + hasDotAfter ix = + case tokenAt (ix + 1) of + Just (TokSymbol '.') -> True + _ -> False + + hintForToken aliases defaultTable (ix, TokParam index) = + let matches = catMaybes + [ matchEqRight aliases ix index + , matchEqLeft aliases ix index + , matchInRight aliases ix index + , matchAnyRight aliases ix index + , matchEqRightUnqualified defaultTable ix index + , matchEqLeftUnqualified defaultTable ix index + , matchInRightUnqualified defaultTable ix index + , matchAnyRightUnqualified defaultTable ix index + ] + in listToMaybe matches + hintForToken _ _ _ = Nothing + + matchEqRight aliases ix index = do + TokSymbol '=' <- tokenAt (ix - 1) + TokIdent column <- tokenAt (ix - 2) + TokSymbol '.' 
<- tokenAt (ix - 3) + TokIdent tableRef <- tokenAt (ix - 4) + tableName <- Map.lookup tableRef aliases + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = False } + + matchEqRightUnqualified defaultTable ix index = do + tableName <- defaultTable + TokSymbol '=' <- tokenAt (ix - 1) + TokIdent column <- tokenAt (ix - 2) + guard (not (hasDotBefore (ix - 2))) + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = False } + + matchEqLeft aliases ix index = do + TokSymbol '=' <- tokenAt (ix + 1) + TokIdent tableRef <- tokenAt (ix + 2) + TokSymbol '.' <- tokenAt (ix + 3) + TokIdent column <- tokenAt (ix + 4) + tableName <- Map.lookup tableRef aliases + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = False } + + matchEqLeftUnqualified defaultTable ix index = do + tableName <- defaultTable + TokSymbol '=' <- tokenAt (ix + 1) + TokIdent column <- tokenAt (ix + 2) + guard (not (hasDotAfter (ix + 2))) + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = False } + + matchInRight aliases ix index = do + TokSymbol '(' <- tokenAt (ix - 1) + TokIdent keyword <- tokenAt (ix - 2) + guard (keyword == Text.pack "in") + TokIdent column <- tokenAt (ix - 3) + TokSymbol '.' 
<- tokenAt (ix - 4) + TokIdent tableRef <- tokenAt (ix - 5) + tableName <- Map.lookup tableRef aliases + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = True } + + matchInRightUnqualified defaultTable ix index = do + tableName <- defaultTable + TokSymbol '(' <- tokenAt (ix - 1) + TokIdent keyword <- tokenAt (ix - 2) + guard (keyword == Text.pack "in") + TokIdent column <- tokenAt (ix - 3) + guard (not (hasDotBefore (ix - 3))) + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = True } + + matchAnyRight aliases ix index = do + TokSymbol '(' <- tokenAt (ix - 1) + TokIdent keyword <- tokenAt (ix - 2) + guard (keyword == Text.pack "any") + TokSymbol '=' <- tokenAt (ix - 3) + TokIdent column <- tokenAt (ix - 4) + TokSymbol '.' <- tokenAt (ix - 5) + TokIdent tableRef <- tokenAt (ix - 6) + tableName <- Map.lookup tableRef aliases + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = True } + + matchAnyRightUnqualified defaultTable ix index = do + tableName <- defaultTable + TokSymbol '(' <- tokenAt (ix - 1) + TokIdent keyword <- tokenAt (ix - 2) + guard (keyword == Text.pack "any") + TokSymbol '=' <- tokenAt (ix - 3) + TokIdent column <- tokenAt (ix - 4) + guard (not (hasDotBefore (ix - 4))) + pure ParamHint { phIndex = index, phTable = tableName, phColumn = column, phArray = True } + + mergeHints acc hint = + Map.alter (mergeHint hint) (phIndex hint) acc + + mergeHint hint Nothing = Just (Just hint) + mergeHint hint (Just Nothing) = Just Nothing + mergeHint hint (Just (Just existing)) + | existing == hint = Just (Just existing) + | otherwise = Just Nothing + +resolveParamHintTypes :: Map.Map PQ.Oid TableMeta -> Map.Map PQ.Oid PgTypeInfo -> Map.Map Int ParamHint -> TH.Q (Map.Map Int TH.Type) +resolveParamHintTypes tables typeInfo hints = do + let tablesByName = tables + |> Map.toList + |> mapMaybe (\(oid, table@TableMeta { tmName }) -> Just (tmName, (oid, table))) + |> 
Map.fromList + resolved <- mapM (resolveHint tablesByName) (Map.toList hints) + pure (Map.fromList (catMaybes resolved)) + where + resolveHint tablesByName (index, ParamHint { phTable, phColumn, phArray }) = do + case Map.lookup phTable tablesByName of + Nothing -> pure Nothing + Just (tableOid, table@TableMeta { tmColumns }) -> + case findColumn tmColumns phColumn of + Nothing -> pure Nothing + Just (attnum, ColumnMeta { cmTypeOid }) -> do + baseType <- hsTypeForColumn typeInfo tables DescribeColumn + { dcName = CS.cs phColumn + , dcType = cmTypeOid + , dcTable = tableOid + , dcAttnum = Just attnum + } + let stripped = stripMaybeType baseType + let hintedType = if phArray then TH.AppT TH.ListT stripped else stripped + pure (Just (index, hintedType)) + + findColumn columns columnName = + columns + |> Map.toList + |> List.find (\(_, ColumnMeta { cmName }) -> Text.toLower cmName == Text.toLower columnName) + |> fmap (\(attnum, column) -> (attnum, column)) + +stripMaybeType :: TH.Type -> TH.Type +stripMaybeType (TH.AppT (TH.ConT maybeName) inner) + | maybeName == ''Maybe = inner +stripMaybeType other = other + -- Parse a placeholder expression into TH. 
parseExpr :: String -> TH.ExpQ parseExpr exprText = From 6a0b4f6c49c09cc52807439f0d96e19075540bd9 Mon Sep 17 00:00:00 2001 From: vcombey Date: Sat, 17 Jan 2026 22:15:02 +0100 Subject: [PATCH 12/13] auto refresh htmx poc --- Guide/auto-refresh.markdown | 132 ++++++++++++ ihp/IHP/AutoRefresh.hs | 81 ++++++- ihp/IHP/AutoRefresh/Types.hs | 2 + ihp/IHP/AutoRefresh/View.hs | 4 +- ihp/Test/AutoRefreshSpec.hs | 67 ++++++ ihp/Test/AutoRefreshSpecMain.hs | 9 + ihp/Test/Main.hs | 89 ++++++++ ihp/data/static/ihp-auto-refresh.js | 320 +++++++++++++++++++++++----- 8 files changed, 641 insertions(+), 63 deletions(-) create mode 100644 ihp/Test/AutoRefreshSpec.hs create mode 100644 ihp/Test/AutoRefreshSpecMain.hs create mode 100644 ihp/Test/Main.hs diff --git a/Guide/auto-refresh.markdown b/Guide/auto-refresh.markdown index f6b04d653..89d0d2fd6 100644 --- a/Guide/auto-refresh.markdown +++ b/Guide/auto-refresh.markdown @@ -92,3 +92,135 @@ action StatsAction = autoRefresh do ``` The [`trackTableRead`](https://ihp.digitallyinduced.com/api-docs/IHP-ModelSupport.html#v:trackTableRead) marks the table as accessed for Auto Refresh and leads to the table being watched. + +### Using Auto Refresh with HTMX + +HTMX endpoints often render just a fragment and swap it into an existing container. Auto Refresh can cooperate with that flow as long as the client knows which element to morph and the fragment exposes the session meta data. You can use multiple Auto Refresh-powered HTMX fragments on one page as long as each swap target has its own stable `id`. + +Auto Refresh decides which DOM node to update by looking at a target selector stored on the meta tag: + +- If the meta tag has `data-ihp-auto-refresh-target`, that selector is used. +- Otherwise, after an HTMX swap, the client uses the swap target `id` (from `htmx:afterSwap`) and treats it as `#id`. +- If neither is available, Auto Refresh falls back to the full page, which is usually not what you want for fragments. + +In practice: + +1. 
Wrap the HTMX action in `autoRefresh`. +2. Include `{autoRefreshMeta}` inside the fragment that HTMX swaps in, or omit it and let Auto Refresh inject it automatically. The meta tag can be anywhere in the fragment; the client moves it into `` after the swap. +3. Give the swap target a stable `id` so Auto Refresh can infer `#id`. If the target has no `id`, Auto Refresh will generate one in the browser (e.g. `ihp-auto-refresh-target-1`). If you want a different selector, set it explicitly with [`setAutoRefreshTarget`](https://ihp.digitallyinduced.com/api-docs/IHP-AutoRefresh.html#v:setAutoRefreshTarget). +4. Keep the container stable (e.g. the same `id`) so morphdom can update its children without losing your `hx-*` attributes. + +#### Example 1: Basic fragment swap (no setAutoRefreshTarget) + +```haskell +-- Controller +action RefineChatPaneAction { chatId } = autoRefresh do + messages <- query @Message + |> filterWhere (#chatId, chatId) + |> orderByDesc #createdAt + |> fetch + render RefineChatPaneView { .. } + +-- View +instance View RefineChatPaneView where + html RefineChatPaneView { .. } = [hsx| + {autoRefreshMeta} + {forEach messages renderMessage} + |] +``` + +On the page you can keep your skeleton loader and HTMX setup. Because HTMX swaps into `
<div id="chat-pane">`, the `htmx:afterSwap` handler derives the target selector `#chat-pane` automatically:
+
+```haskell
+[hsx|
+<div id="chat-pane" hx-get={pathTo (RefineChatPaneAction chatId)} hx-trigger="load" hx-swap="innerHTML">
+    {skeleton}
+</div>
+|] +``` + +After HTMX swaps in the fragment, the Auto Refresh client moves the meta tag into ``, reuses the session id, reconnects the WebSocket, and limits updates to `#chat-pane`. Avoid rendering another `#chat-pane` inside the fragment when using `hx-swap="innerHTML"`, or you will end up with duplicate `id` values. + +#### Example 2: No `id` on the swap target (use setAutoRefreshTarget) + +If the HTMX target is selected by class or some other selector, Auto Refresh cannot infer the target. Set it explicitly: + +```haskell +-- Controller +action SidebarAction = autoRefresh do + setAutoRefreshTarget ".sidebar-pane" + items <- query @Item |> fetch + render SidebarView { .. } + +-- View +instance View SidebarView where + html SidebarView { .. } = [hsx| + {autoRefreshMeta} + {forEach items renderItem} + |] +``` + +```haskell +[hsx| + +|] +``` + +#### Example 3: Outer swap (fragment includes the container) + +If you want the fragment to include the wrapper, use `hx-swap="outerHTML"`: + +```haskell +-- View +instance View RefineChatPaneView where + html RefineChatPaneView { .. } = [hsx| + {autoRefreshMeta} +
+ {forEach messages renderMessage} +
+ |] +``` + +```haskell +[hsx| +
+ {skeleton} +
+|] +``` + +#### Example 4: Multiple fragments on one page + +Each fragment has its own target `id` and its own Auto Refresh session: + +```haskell +[hsx| +
+
+|] +``` + +```haskell +-- RefineChatPaneView +[hsx|{autoRefreshMeta}{forEach messages renderMessage}|] + +-- ActivityPaneView +[hsx|{autoRefreshMeta}{forEach activities renderActivity}|] +``` diff --git a/ihp/IHP/AutoRefresh.hs b/ihp/IHP/AutoRefresh.hs index 5c9cc8dbc..dc3653b35 100644 --- a/ihp/IHP/AutoRefresh.hs +++ b/ihp/IHP/AutoRefresh.hs @@ -7,18 +7,23 @@ module IHP.AutoRefresh where import IHP.Prelude import IHP.AutoRefresh.Types +import IHP.AutoRefresh.View (autoRefreshMeta) import IHP.ControllerSupport import qualified Data.UUID.V4 as UUID import qualified Data.UUID as UUID import IHP.Controller.Session import qualified Network.Wai.Internal as Wai import qualified Data.Binary.Builder as ByteString +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as LBS import qualified Data.Set as Set import IHP.ModelSupport import qualified Control.Exception as Exception import qualified Control.Concurrent.MVar as MVar import qualified Data.Maybe as Maybe import qualified Data.Text as Text +import qualified Data.Text.Encoding as TextEncoding +import qualified Data.Text.Encoding.Error as TextEncodingError import IHP.WebSocket import IHP.Controller.Context import IHP.Controller.Response @@ -29,11 +34,18 @@ import qualified IHP.Log as Log import qualified Data.Vault.Lazy as Vault import System.IO.Unsafe (unsafePerformIO) import Network.Wai +import Network.HTTP.Types.Header (hContentType) +import qualified Text.Blaze.Html.Renderer.Text as BlazeText initAutoRefresh :: (?context :: ControllerContext) => IO () initAutoRefresh = do putContext AutoRefreshDisabled +-- | Limits the client-side morphing to the DOM node matching the given CSS selector. +-- Useful when combining Auto Refresh with fragment based renderers such as HTMX. 
+setAutoRefreshTarget :: (?context :: ControllerContext) => Text -> IO () +setAutoRefreshTarget selector = putContext (AutoRefreshTarget selector) + autoRefresh :: ( ?theAction :: action , Controller action @@ -88,7 +100,12 @@ autoRefresh runAction = do -- it will render a 'error "JSON not implemented"'. After this curl request -- all future HTML requests to the current action will fail with a 503. -- - lastResponse <- Exception.evaluate (ByteString.toLazyByteString builder) + rawResponse <- Exception.evaluate (ByteString.toLazyByteString builder) + lastResponse <- ensureAutoRefreshMeta headers rawResponse + let response' = + if lastResponse == rawResponse + then response + else Wai.ResponseBuilder status headers (ByteString.fromLazyByteString lastResponse) event <- MVar.newEmptyMVar let session = AutoRefreshSession { id, renderView, event, tables, lastResponse, lastPing } @@ -97,7 +114,7 @@ autoRefresh runAction = do registerNotificationTrigger ?touchedTables autoRefreshServer - throw exception + throw (ResponseException response') _ -> error "Unimplemented WAI response type." 
 runAction `Exception.catch` handleResponse
 
@@ -121,7 +138,8 @@ instance WSApp AutoRefreshWSApp where
     let handleResponseException (ResponseException response) = case response of
             Wai.ResponseBuilder status headers builder -> do
-                let html = ByteString.toLazyByteString builder
+                rawHtml <- pure (ByteString.toLazyByteString builder)
+                html <- ensureAutoRefreshMeta headers rawHtml
 
                 Log.info ("AutoRefresh: inner = " <> show (status, headers, builder) <> " END")
@@ -255,6 +273,61 @@ notificationTrigger tableName = PG.Query [i|
         updateTriggerName = "ar_did_update_" <> tableName
         deleteTriggerName = "ar_did_delete_" <> tableName
 
+ensureAutoRefreshMeta :: (?context :: ControllerContext) => ResponseHeaders -> LByteString -> IO LByteString
+ensureAutoRefreshMeta headers html
+    | not (isHtmlResponse headers) = pure html
+    | LBS.isInfixOf "ihp-auto-refresh-id" html = pure html
+    | otherwise = do
+        meta <- renderAutoRefreshMeta
+        if LBS.null meta
+            then pure html
+            else pure (insertAutoRefreshMeta meta html)
+
+renderAutoRefreshMeta :: (?context :: ControllerContext) => IO LByteString
+renderAutoRefreshMeta = do
+    frozenContext <- freeze ?context
+    let ?context = frozenContext
+    let metaText = BlazeText.renderHtml autoRefreshMeta
+    pure (encodeUtf8Text metaText)
+
+insertAutoRefreshMeta :: LByteString -> LByteString -> LByteString
+insertAutoRefreshMeta meta html =
+    let metaText = decodeUtf8Text meta
+        htmlText = decodeUtf8Text html
+        resultText = insertMetaText metaText htmlText
+    in encodeUtf8Text resultText
+
+insertMetaText :: Text -> Text -> Text
+insertMetaText meta html =
+    case findHeadInsertionIndex html of
+        Just index -> Text.take index html <> meta <> Text.drop index html
+        Nothing -> meta <> html
+
+findHeadInsertionIndex :: Text -> Maybe Int
+findHeadInsertionIndex html =
+    let lowerHtml = Text.toLower html
+        (beforeHead, rest) = Text.breakOn "<head" lowerHtml
+        afterHead = Text.dropWhile (/= '>') rest
+    in if Text.null afterHead
+        then Nothing
+        else
+            let headOpenLen = Text.length rest - Text.length afterHead
+            in
Just (Text.length beforeHead + headOpenLen + 1) + +isHtmlResponse :: ResponseHeaders -> Bool +isHtmlResponse headers = case lookup hContentType headers of + Just value -> "text/html" `BS.isPrefixOf` value + Nothing -> False + +decodeUtf8Text :: LByteString -> Text +decodeUtf8Text = TextEncoding.decodeUtf8With TextEncodingError.lenientDecode . LBS.toStrict + +encodeUtf8Text :: Text -> LByteString +encodeUtf8Text = LBS.fromStrict . TextEncoding.encodeUtf8 + autoRefreshVaultKey :: Vault.Key (IORef AutoRefreshServer) autoRefreshVaultKey = unsafePerformIO Vault.newKey {-# NOINLINE autoRefreshVaultKey #-} @@ -270,4 +343,4 @@ autoRefreshServerFromRequest :: Request -> IORef AutoRefreshServer autoRefreshServerFromRequest request = case Vault.lookup autoRefreshVaultKey request.vault of Just server -> server - Nothing -> error "AutoRefresh middleware not initialized. Please make sure you have added the AutoRefresh middleware to your application." \ No newline at end of file + Nothing -> error "AutoRefresh middleware not initialized. Please make sure you have added the AutoRefresh middleware to your application." 
diff --git a/ihp/IHP/AutoRefresh/Types.hs b/ihp/IHP/AutoRefresh/Types.hs
index df339dcbc..4dc29fe3b 100644
--- a/ihp/IHP/AutoRefresh/Types.hs
+++ b/ihp/IHP/AutoRefresh/Types.hs
@@ -11,6 +11,8 @@ import Control.Concurrent.MVar (MVar)
 import qualified IHP.PGListener as PGListener
 
 data AutoRefreshState = AutoRefreshDisabled | AutoRefreshEnabled { sessionId :: !UUID }
+
+newtype AutoRefreshTarget = AutoRefreshTarget Text
     deriving (Eq, Show)
 data AutoRefreshSession = AutoRefreshSession
     { id :: !UUID
     -- | A callback to rerun an action within a given request context
diff --git a/ihp/IHP/AutoRefresh/View.hs b/ihp/IHP/AutoRefresh/View.hs
index 9ffc4c190..76f8f9a5a 100644
--- a/ihp/IHP/AutoRefresh/View.hs
+++ b/ihp/IHP/AutoRefresh/View.hs
@@ -9,4 +9,6 @@ import IHP.Controller.Context
 autoRefreshMeta :: (?context :: ControllerContext) => Html5.Html
 autoRefreshMeta = case fromFrozenContext @AutoRefreshState of
     AutoRefreshDisabled -> mempty
-    AutoRefreshEnabled { sessionId } -> [hsx|<meta property="ihp-auto-refresh-id" content={tshow sessionId}/>|]
+    AutoRefreshEnabled { sessionId } -> case maybeFromFrozenContext @AutoRefreshTarget of
+        Just (AutoRefreshTarget target) -> [hsx|<meta property="ihp-auto-refresh-id" content={tshow sessionId} data-ihp-auto-refresh-target={target}/>|]
+        Nothing -> [hsx|<meta property="ihp-auto-refresh-id" content={tshow sessionId}/>|]
diff --git a/ihp/Test/AutoRefreshSpec.hs b/ihp/Test/AutoRefreshSpec.hs
new file mode 100644
index 000000000..09a36b57b
--- /dev/null
+++ b/ihp/Test/AutoRefreshSpec.hs
@@ -0,0 +1,67 @@
+{-|
+Module: Test.AutoRefreshSpec
+
+Unit tests for Auto Refresh helpers.
+-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE QuasiQuotes #-}
+module Test.AutoRefreshSpec where
+
+import Test.Hspec
+import IHP.Prelude
+import qualified Data.UUID as UUID
+import IHP.AutoRefresh
+import IHP.AutoRefresh.View
+import IHP.AutoRefresh.Types
+import IHP.Controller.Context
+import Text.Blaze.Html.Renderer.Text (renderHtml)
+
+import IHP.HSX.QQ (hsx)
+import qualified Text.Blaze.Html5 as Html5
+
+renderMeta :: (?context :: ControllerContext) => Text
+renderMeta = cs (renderHtml autoRefreshMeta)
+
+withFreshContext :: (ControllerContext -> IO a) -> IO a
+withFreshContext block = do
+    let ?requestContext = undefined
+    context <- newControllerContext
+    block context
+
+freezeContext :: ControllerContext -> IO ControllerContext
+freezeContext = freeze
+
+tests :: Spec
+tests = describe "AutoRefresh" do
+    it "stores AutoRefreshTarget in the controller context" do
+        withFreshContext \context -> do
+            let ?context = context
+            setAutoRefreshTarget "#chat-pane"
+            fromContext @AutoRefreshTarget `shouldReturn` AutoRefreshTarget "#chat-pane"
+
+    it "renders meta tag with target attribute when target is set" do
+        withFreshContext \context -> do
+            let ?context = context
+            putContext AutoRefreshDisabled
+            putContext (AutoRefreshEnabled UUID.nil)
+            setAutoRefreshTarget "#chat-pane"
+            frozen <- freezeContext ?context
+            let ?context = frozen
+            renderMeta `shouldBe` "<meta property=\"ihp-auto-refresh-id\" content=\"00000000-0000-0000-0000-000000000000\" data-ihp-auto-refresh-target=\"#chat-pane\">"
+
+    it "renders meta tag without target attribute when no target is set" do
+        withFreshContext \context -> do
+            let ?context = context
+            putContext AutoRefreshDisabled
+            putContext (AutoRefreshEnabled UUID.nil)
+            frozen <- freezeContext ?context
+            let ?context = frozen
+            renderMeta `shouldBe` "<meta property=\"ihp-auto-refresh-id\" content=\"00000000-0000-0000-0000-000000000000\">"
+
+    it "renders nothing when auto refresh is disabled" do
+        withFreshContext \context -> do
+            let ?context = context
+            putContext AutoRefreshDisabled
+            frozen <- freezeContext ?context
+            let ?context = frozen
+            renderMeta `shouldBe` ""
diff --git a/ihp/Test/AutoRefreshSpecMain.hs
b/ihp/Test/AutoRefreshSpecMain.hs new file mode 100644 index 000000000..06828d7bc --- /dev/null +++ b/ihp/Test/AutoRefreshSpecMain.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE NoImplicitPrelude #-} +module Main where + +import IHP.Prelude +import Test.Hspec +import qualified Test.AutoRefreshSpec as AutoRefreshSpec + +main :: IO () +main = hspec AutoRefreshSpec.tests diff --git a/ihp/Test/Main.hs b/ihp/Test/Main.hs new file mode 100644 index 000000000..0286d6247 --- /dev/null +++ b/ihp/Test/Main.hs @@ -0,0 +1,89 @@ +{-| +Module: Test.IDE.SchemaDesigner.CompilerSpec +Description: Entrypoint to the hspec Testsuite +Copyright: (c) digitally induced GmbH, 2020 + +When in the IHP directory, you can run this file like: + + > nix-shell + > ghci + > :l Test/Main.hs + > main + +-} +module Main where + +import Test.Hspec +import IHP.Prelude + +import qualified Test.IDE.SchemaDesigner.CompilerSpec +import qualified Test.IDE.SchemaDesigner.ParserSpec +import qualified Test.IDE.SchemaDesigner.Controller.EnumValuesSpec +import qualified Test.IDE.SchemaDesigner.Controller.HelperSpec +import qualified Test.IDE.SchemaDesigner.Controller.ValidationSpec +import qualified Test.IDE.SchemaDesigner.SchemaOperationsSpec +import qualified Test.ValidationSupport.ValidateFieldSpec +import qualified Test.IDE.CodeGeneration.ControllerGenerator +import qualified Test.IDE.CodeGeneration.ViewGenerator +import qualified Test.IDE.CodeGeneration.MailGenerator +import qualified Test.IDE.CodeGeneration.JobGenerator +import qualified Test.NameSupportSpec +import qualified Test.HaskellSupportSpec +import qualified Test.View.CSSFrameworkSpec +import qualified Test.View.FormSpec +import qualified Test.Controller.ContextSpec +import qualified Test.Controller.ParamSpec +import qualified Test.Controller.CookieSpec +import qualified Test.Controller.AccessDeniedSpec +import qualified Test.Controller.NotFoundSpec +import qualified Test.SchemaMigrationSpec +import qualified Test.ModelSupportSpec +import qualified 
Test.SchemaCompilerSpec +import qualified Test.QueryBuilderSpec +import qualified Test.RouterSupportSpec +import qualified Test.ViewSupportSpec +import qualified Test.AutoRefreshSpec +import qualified Test.ServerSideComponent.HtmlParserSpec +import qualified Test.ServerSideComponent.HtmlDiffSpec +import qualified Test.FileStorage.ControllerFunctionsSpec +import qualified Test.DataSync.DynamicQueryCompiler +import qualified Test.IDE.CodeGeneration.MigrationGenerator +import qualified Test.PGListenerSpec +import qualified Test.SEO.Sitemap + +main :: IO () +main = hspec do + Test.IDE.SchemaDesigner.CompilerSpec.tests + Test.IDE.SchemaDesigner.ParserSpec.tests + Test.IDE.SchemaDesigner.Controller.EnumValuesSpec.tests + Test.IDE.SchemaDesigner.Controller.HelperSpec.tests + Test.IDE.SchemaDesigner.Controller.ValidationSpec.tests + Test.ValidationSupport.ValidateFieldSpec.tests + Test.IDE.CodeGeneration.ControllerGenerator.tests + Test.IDE.CodeGeneration.ViewGenerator.tests + Test.IDE.CodeGeneration.MailGenerator.tests + Test.IDE.CodeGeneration.JobGenerator.tests + Test.NameSupportSpec.tests + Test.HaskellSupportSpec.tests + Test.View.CSSFrameworkSpec.tests + Test.View.FormSpec.tests + Test.Controller.ContextSpec.tests + Test.Controller.ParamSpec.tests + Test.Controller.AccessDeniedSpec.tests + Test.Controller.NotFoundSpec.tests + Test.SchemaMigrationSpec.tests + Test.ModelSupportSpec.tests + Test.SchemaCompilerSpec.tests + Test.QueryBuilderSpec.tests + Test.RouterSupportSpec.tests + Test.ViewSupportSpec.tests + Test.AutoRefreshSpec.tests + Test.ServerSideComponent.HtmlParserSpec.tests + Test.ServerSideComponent.HtmlDiffSpec.tests + Test.FileStorage.ControllerFunctionsSpec.tests + Test.DataSync.DynamicQueryCompiler.tests + Test.IDE.SchemaDesigner.SchemaOperationsSpec.tests + Test.IDE.CodeGeneration.MigrationGenerator.tests + Test.Controller.CookieSpec.tests + Test.PGListenerSpec.tests + Test.SEO.Sitemap.tests diff --git a/ihp/data/static/ihp-auto-refresh.js 
b/ihp/data/static/ihp-auto-refresh.js index 492cf0d95..0c8b2df87 100644 --- a/ihp/data/static/ihp-auto-refresh.js +++ b/ihp/data/static/ihp-auto-refresh.js @@ -1,80 +1,275 @@ -var socket = null; -var sessionId = null; +var autoRefreshSessions = {}; var autoRefreshPaused = false; +var autoRefreshTargetCounter = 0; -function autoRefreshView() { - var metaTag = document.querySelector('meta[property="ihp-auto-refresh-id"]'); +var morphdomOptions = { + getNodeKey: function (el) { + var key = el.id; + if (el.id) { + key = el.id; + } else if (el.form && el.name) { + key = el.name + "_" + el.form.action; + } else if (el instanceof HTMLFormElement) { + key = "form#" + el.action; + } else if (el instanceof HTMLScriptElement) { + key = el.src; + } + return key; + }, + onBeforeElChildrenUpdated: function (fromEl, toEl) { + if (fromEl.tagName === 'TEXTAREA' || fromEl.tagName === 'INPUT') { + toEl.checked = fromEl.checked; + toEl.value = fromEl.value; + } else if (fromEl.tagName === 'OPTION') { + toEl.selected = fromEl.selected; + } + } +}; - if (!metaTag) { - if (socket) { - console.log('Closing socket'); - socket.close(); +function cloneMorphdomOptions(extraOptions) { + var clone = {}; + var key; + for (key in morphdomOptions) { + if (Object.prototype.hasOwnProperty.call(morphdomOptions, key)) { + clone[key] = morphdomOptions[key]; + } + } + for (key in extraOptions) { + if (Object.prototype.hasOwnProperty.call(extraOptions, key)) { + clone[key] = extraOptions[key]; } + } + return clone; +} + +function escapeCssIdentifier(value) { + if (window.CSS && CSS.escape) { + return CSS.escape(value); + } + return value.replace(/([^\w-])/g, '\\$1'); +} + +function inferTargetSelector(meta, fallbackTarget) { + if (fallbackTarget && fallbackTarget.id) { + return '#' + escapeCssIdentifier(fallbackTarget.id); + } + if (meta && meta.parentElement && meta.parentElement.id) { + return '#' + escapeCssIdentifier(meta.parentElement.id); + } + return null; +} + +function ensureTargetHasId(target) 
{ + if (!target || target.id) { return; } - var socketProtocol = location.protocol === 'https:' ? 'wss' : 'ws'; - var socketHost = socketProtocol + "://" + window.location.hostname + ":" + document.location.port + '/AutoRefreshWSApp'; - if (socket && metaTag.content === sessionId) { - // Socket is already running + var id; + do { + autoRefreshTargetCounter += 1; + id = 'ihp-auto-refresh-target-' + autoRefreshTargetCounter; + } while (document.getElementById(id)); + + target.id = id; +} + +function getMetaTarget(meta) { + if (!meta) { + return null; + } + var target = meta.getAttribute('data-ihp-auto-refresh-target'); + return target && target.length > 0 ? target : null; +} + +function getSessionKey(config) { + return config.target ? 'target:' + config.target : 'body'; +} + +function replaceAutoRefreshMeta(meta) { + if (!meta) { return; - } else if (socket) { - // Socket is running, but the page has changed - socket.close(); - socket = new WebSocket(socketHost); - sessionId = metaTag.content; - } else { - // First setup of socket - socket = new WebSocket(socketHost); - sessionId = metaTag.content; } + var metaTarget = getMetaTarget(meta); + var existing = document.head.querySelectorAll('meta[property="ihp-auto-refresh-id"]'); + Array.prototype.forEach.call(existing, function (node) { + var nodeTarget = getMetaTarget(node); + if (nodeTarget === metaTarget && node.parentNode) { + node.parentNode.removeChild(node); + } + }); + document.head.appendChild(meta); +} + +function harvestAutoRefreshMetaFromNode(node, fallbackTarget) { + if (!node || !node.querySelectorAll) { + return; + } + + var metas = node.querySelectorAll('meta[property="ihp-auto-refresh-id"]'); + Array.prototype.forEach.call(metas, function (meta) { + if (meta.parentNode && meta.parentNode.tagName === 'HEAD') { + return; + } + + var clone = meta.cloneNode(true); + if (!clone.getAttribute('data-ihp-auto-refresh-target')) { + var inferredTarget = inferTargetSelector(meta, fallbackTarget); + if 
(inferredTarget) { + clone.setAttribute('data-ihp-auto-refresh-target', inferredTarget); + } + } + meta.parentNode.removeChild(meta); + replaceAutoRefreshMeta(clone); + }); +} + +function readAutoRefreshConfigs() { + var metas = document.head.querySelectorAll('meta[property="ihp-auto-refresh-id"]'); + + if (!metas || metas.length === 0) { + harvestAutoRefreshMetaFromNode(document); + metas = document.head.querySelectorAll('meta[property="ihp-auto-refresh-id"]'); + } + + if (!metas || metas.length === 0) { + return []; + } + + var configs = []; + var seen = {}; + Array.prototype.forEach.call(metas, function (meta) { + var target = getMetaTarget(meta); + var config = { + sessionId: meta.content, + target: target + }; + var key = getSessionKey(config); + if (seen[key]) { + return; + } + seen[key] = true; + configs.push(config); + }); + return configs; +} + +function ensureSocketClosed(session) { + if (session && session.socket) { + session.socket.close(); + session.socket = null; + } +} + +function socketHost() { + var socketProtocol = location.protocol === 'https:' ? 
'wss' : 'ws'; + return socketProtocol + '://' + window.location.hostname + ':' + document.location.port + '/AutoRefreshWSApp'; +} + +function openAutoRefreshSession(config, key) { + var session = { + sessionId: config.sessionId, + targetSelector: config.target || null, + socket: null + }; + + var socket = new WebSocket(socketHost()); + session.socket = socket; autoRefreshPaused = false; - socket.onopen = function (event) { - socket.send(metaTag.content); + socket.onopen = function () { + socket.send(session.sessionId); }; socket.onmessage = function (event) { - var html = event.data; - var parser = new DOMParser(); - var dom = parser.parseFromString(html, 'text/html'); + handleIncomingHtml(event.data, session); + }; + + socket.onclose = function () { + if (autoRefreshSessions[key] === session) { + delete autoRefreshSessions[key]; + } + }; + + return session; +} + +function closeAllSessions() { + Object.keys(autoRefreshSessions).forEach(function (key) { + ensureSocketClosed(autoRefreshSessions[key]); + delete autoRefreshSessions[key]; + }); +} - if (autoRefreshPaused) { +function autoRefreshView() { + var configs = readAutoRefreshConfigs(); + + if (!configs || configs.length === 0) { + closeAllSessions(); + return; + } + + var nextKeys = {}; + configs.forEach(function (config) { + if (!config.sessionId) { return; } + var key = getSessionKey(config); + nextKeys[key] = true; + var existing = autoRefreshSessions[key]; + if (existing && existing.sessionId === config.sessionId) { + return; + } + if (existing) { + ensureSocketClosed(existing); + } + autoRefreshSessions[key] = openAutoRefreshSession(config, key); + }); - morphdom(document.body, dom.body, { - getNodeKey: function (el) { - - var key = el.id; - if (el.id) { - key = el.id; - } else if (el.form && el.name) { - key = el.name + "_" + el.form.action; - } else if (el instanceof HTMLFormElement) { - key = "form#" + el.action; - } else if (el instanceof HTMLScriptElement) { - key = el.src; - } - return key; - }, - 
onBeforeElChildrenUpdated: function(fromEl, toEl) { - if (fromEl.tagName === 'TEXTAREA' || fromEl.tagName === 'INPUT') { - toEl.checked = fromEl.checked; - toEl.value = fromEl.value; - } else if (fromEl.tagName === 'OPTION') { - toEl.selected = fromEl.selected; - } - } - }); + Object.keys(autoRefreshSessions).forEach(function (key) { + if (!nextKeys[key]) { + ensureSocketClosed(autoRefreshSessions[key]); + delete autoRefreshSessions[key]; + } + }); +} - window.clearAllIntervals(); - window.clearAllTimeouts(); - - var event = new CustomEvent('turbolinks:load', {}); - document.dispatchEvent(event); - }; +function handleIncomingHtml(html, session) { + if (autoRefreshPaused) { + return; + } + + var parser = new DOMParser(); + var dom = parser.parseFromString(html, 'text/html'); + + var fallbackTarget = null; + if (session && session.targetSelector) { + fallbackTarget = document.querySelector(session.targetSelector); + } + harvestAutoRefreshMetaFromNode(dom, fallbackTarget); + autoRefreshView(); + + var targetSelector = session ? 
session.targetSelector : null; + if (targetSelector) { + var target = document.querySelector(targetSelector); + if (!target) { + return; + } + var newTarget = dom.querySelector(targetSelector); + if (newTarget) { + morphdom(target, newTarget, morphdomOptions); + } else { + morphdom(target, dom.body, cloneMorphdomOptions({ childrenOnly: true })); + } + } else { + morphdom(document.body, dom.body, morphdomOptions); + } + + window.clearAllIntervals(); + window.clearAllTimeouts(); + + var loadEvent = new CustomEvent('turbolinks:load', {}); + document.dispatchEvent(loadEvent); } /* Called by helpers.js when a form was just submitted and we're waiting for a response from the server */ @@ -86,4 +281,13 @@ if (window.Turbolinks) { document.addEventListener('turbolinks:load', autoRefreshView); } else { autoRefreshView(); -} \ No newline at end of file +} + +if (window.htmx) { + document.addEventListener('htmx:afterSwap', function (event) { + var target = event && event.detail && event.detail.target ? 
event.detail.target : event.target; + ensureTargetHasId(target); + harvestAutoRefreshMetaFromNode(target, target); + autoRefreshView(); + }); +} From 64e16691714677b148ff22b00e2d728d677d2c37 Mon Sep 17 00:00:00 2001 From: vcombey Date: Sun, 18 Jan 2026 11:29:46 +0100 Subject: [PATCH 13/13] tests --- ihp/IHP/AutoRefresh.hs | 7 ++++--- ihp/Test/AutoRefreshSpecMain.hs | 1 + ihp/Test/{ => Test}/AutoRefreshSpec.hs | 0 ihp/Test/Test/Main.hs | 1 - 4 files changed, 5 insertions(+), 4 deletions(-) rename ihp/Test/{ => Test}/AutoRefreshSpec.hs (100%) diff --git a/ihp/IHP/AutoRefresh.hs b/ihp/IHP/AutoRefresh.hs index dc3653b35..9fe5989f6 100644 --- a/ihp/IHP/AutoRefresh.hs +++ b/ihp/IHP/AutoRefresh.hs @@ -22,6 +22,7 @@ import qualified Control.Exception as Exception import qualified Control.Concurrent.MVar as MVar import qualified Data.Maybe as Maybe import qualified Data.Text as Text +import qualified Data.Text.Lazy as LazyText import qualified Data.Text.Encoding as TextEncoding import qualified Data.Text.Encoding.Error as TextEncodingError import IHP.WebSocket @@ -34,7 +35,7 @@ import qualified IHP.Log as Log import qualified Data.Vault.Lazy as Vault import System.IO.Unsafe (unsafePerformIO) import Network.Wai -import Network.HTTP.Types.Header (hContentType) +import Network.HTTP.Types.Header (ResponseHeaders, hContentType) import qualified Text.Blaze.Html.Renderer.Text as BlazeText initAutoRefresh :: (?context :: ControllerContext) => IO () @@ -276,7 +277,7 @@ notificationTrigger tableName = PG.Query [i| ensureAutoRefreshMeta :: (?context :: ControllerContext) => ResponseHeaders -> LByteString -> IO LByteString ensureAutoRefreshMeta headers html | not (isHtmlResponse headers) = pure html - | LBS.isInfixOf "ihp-auto-refresh-id" html = pure html + | BS.isInfixOf "ihp-auto-refresh-id" (LBS.toStrict html) = pure html | otherwise = do meta <- renderAutoRefreshMeta if LBS.null meta @@ -287,7 +288,7 @@ renderAutoRefreshMeta :: (?context :: ControllerContext) => IO LByteString 
renderAutoRefreshMeta = do frozenContext <- freeze ?context let ?context = frozenContext - let metaText = BlazeText.renderHtml autoRefreshMeta + let metaText = LazyText.toStrict (BlazeText.renderHtml autoRefreshMeta) pure (encodeUtf8Text metaText) insertAutoRefreshMeta :: LByteString -> LByteString -> LByteString diff --git a/ihp/Test/AutoRefreshSpecMain.hs b/ihp/Test/AutoRefreshSpecMain.hs index 06828d7bc..0840a16d6 100644 --- a/ihp/Test/AutoRefreshSpecMain.hs +++ b/ihp/Test/AutoRefreshSpecMain.hs @@ -1,4 +1,5 @@ {-# LANGUAGE NoImplicitPrelude #-} +{-# OPTIONS_GHC -iTest #-} module Main where import IHP.Prelude diff --git a/ihp/Test/AutoRefreshSpec.hs b/ihp/Test/Test/AutoRefreshSpec.hs similarity index 100% rename from ihp/Test/AutoRefreshSpec.hs rename to ihp/Test/Test/AutoRefreshSpec.hs diff --git a/ihp/Test/Test/Main.hs b/ihp/Test/Test/Main.hs index c93fd121d..0e56c55e9 100644 --- a/ihp/Test/Test/Main.hs +++ b/ihp/Test/Test/Main.hs @@ -38,4 +38,3 @@ main = hspec do Test.FileStorage.ControllerFunctionsSpec.tests Test.Controller.CookieSpec.tests Test.PGListenerSpec.tests - Test.TypedSqlSpec.tests