1 change: 1 addition & 0 deletions frameworks/FSharp/giraffe/benchmark_config.json
@@ -7,6 +7,7 @@
"json_url": "/json",
"db_url": "/db",
"query_url": "/queries?queries=",
"update_url": "/updates?queries=",
"fortune_url": "/fortunes",
"port": 8080,
"approach": "Realistic",
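Note that, like query_url, the new update_url is a prefix: the benchmark harness appends the requested row count to it (for example /updates?queries=20), which lines up with the /updates route and the queries parameter handling added in Program.fs below.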
3 changes: 2 additions & 1 deletion frameworks/FSharp/giraffe/config.toml
@@ -2,11 +2,12 @@
name = "giraffe"

[main]
urls.plaintext = "/plaintext"
urls.json = "/json"
urls.db = "/db"
urls.query = "/queries?queries="
urls.fortune = "/fortunes"
urls.update = "/updates?queries="
urls.plaintext = "/plaintext"
approach = "Realistic"
classification = "fullstack"
database = "Postgres"
2 changes: 1 addition & 1 deletion frameworks/FSharp/giraffe/src/App/App.fsproj
@@ -7,10 +7,10 @@

<ItemGroup>
<PackageReference Update="FSharp.Core" Version="9.0.303" />
<PackageReference Include="Dapper" Version="2.1.66" />
<PackageReference Include="Giraffe" Version="8.1.0-alpha-001" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
<PackageReference Include="Npgsql" Version="9.0.4" />
<PackageReference Include="Dapper" Version="2.1.66" />
</ItemGroup>

<ItemGroup>
147 changes: 126 additions & 21 deletions frameworks/FSharp/giraffe/src/App/Program.fs
@@ -18,15 +18,15 @@ module Common =
Database=hello_world;
User Id=benchmarkdbuser;
Password=benchmarkdbpass;
SSL Mode=Disable;
Maximum Pool Size=1024;
NoResetOnClose=true;
Enlist=false;
Max Auto Prepare=4;
Multiplexing=true;
Write Coalescing Buffer Threshold Bytes=1000
Max Auto Prepare=4
"""

[<Literal>]
let MultiplexedConnectionString = ConnectionString + ";Multiplexing=true"

[<Struct>]
type JsonMode =
| System
@@ -46,8 +46,43 @@ module Common =

member _.Next () = rnd.Next (min, max)

[<Literal>]
let MaxDegreeOfParallelism = 3
// Cache SQL strings for bulk updates to avoid rebuilding on every request
// This module generates and caches SQL UPDATE statements that use PostgreSQL's
// VALUES clause to update multiple rows in a single statement.
module BatchUpdateSql =
let private cache = Array.zeroCreate<string> 501

let get (count: int) =
match cache.[count] with
| null ->
let lastIndex = count - 1

// Build the VALUES clause: (@Id_0, @Rn_0), (@Id_1, @Rn_1), ...
// Each pair represents (id, new_randomnumber) for one row
let valueClauses =
List.init lastIndex (fun i -> sprintf "(@Id_%d, @Rn_%d), " i i)
|> String.concat ""

// The final SQL joins the target table to a VALUES list (used as a derived table)
// in order to update multiple rows in one statement:
// UPDATE world SET randomnumber = temp.randomnumber
// FROM (VALUES ...) AS temp(id, randomnumber)
// WHERE temp.id = world.id
let sql =
sprintf
"""
UPDATE world
SET randomnumber = temp.randomnumber
FROM (VALUES %s(@Id_%d, @Rn_%d) ORDER BY 1)
AS temp(id, randomnumber)
WHERE temp.id = world.id
"""
valueClauses
lastIndex
lastIndex

cache.[count] <- sql
sql
| sql -> sql

[<RequireQualifiedAccess>]
module HtmlViews =
@@ -87,10 +122,12 @@ module HttpHandlers =
message = "Additional fortune added at request time."
}

let private fortunes: HttpHandler =
let fortunes: HttpHandler =
fun _ ctx ->
task {
use conn = new NpgsqlConnection (ConnectionString)
let dataSource = ctx.GetService<NpgsqlDataSource> ()
use conn = dataSource.CreateConnection ()
do! conn.OpenAsync ()

let! data = conn.QueryAsync<Fortune> ("SELECT id, message FROM fortune")

@@ -106,11 +143,13 @@
return! ctx.WriteBytesAsync bytes
}

let private db: HttpHandler =
let db: HttpHandler =
fun _ ctx ->
task {
let rnd = ctx.GetService<RandomUtil> ()
use conn = new NpgsqlConnection (ConnectionString)
let dataSource = ctx.GetService<NpgsqlDataSource> ()
use conn = dataSource.CreateConnection ()
do! conn.OpenAsync ()

let! data =
conn.QuerySingleAsync<World> (
@@ -121,7 +160,7 @@ module HttpHandlers =
return! ctx.WriteJsonAsync data
}

let private queries: HttpHandler =
let queries: HttpHandler =
fun _ ctx ->
task {
let queryParam =
@@ -137,30 +176,94 @@ module HttpHandlers =
|> Option.defaultValue 1

let rnd = ctx.GetService<RandomUtil> ()
let dataSource = ctx.GetService<NpgsqlDataSource> ()

use conn = dataSource.CreateConnection ()
do! conn.OpenAsync ()

let! res =
Array.init queryParam (fun _ -> rnd.Next ())
|> Array.map (fun id ->
use conn = new NpgsqlConnection (ConnectionString)
// Read all rows sequentially
let results = Array.zeroCreate<World> queryParam

for i in 0 .. queryParam - 1 do
let! world =
conn.QuerySingleAsync<World> (
"SELECT id, randomnumber FROM world WHERE id = @Id",
{| Id = id |}
{| Id = rnd.Next () |}
)
|> Async.AwaitTask

results.[i] <- world

return! ctx.WriteJsonAsync results
}

let updates: HttpHandler =
fun _ ctx ->
task {
let queryParam =
ctx.TryGetQueryStringValue "queries"
|> Option.map (fun value ->
match System.Int32.TryParse value with
| true, intValue ->
if intValue < 1 then 1
elif intValue > 500 then 500
else intValue
| false, _ -> 1
)
|> fun computations ->
Async.Parallel (computations, MaxDegreeOfParallelism)
|> Option.defaultValue 1

let rnd = ctx.GetService<RandomUtil> ()

// Use multiplexed connection for updates (more efficient for sequential ops)
use conn = new NpgsqlConnection (MultiplexedConnectionString)
do! conn.OpenAsync ()

// Read all rows sequentially
let readResults = Array.zeroCreate<World> queryParam

return! ctx.WriteJsonAsync res
for i in 0 .. queryParam - 1 do
let! world =
conn.QuerySingleAsync<World> (
"SELECT id, randomnumber FROM world WHERE id = @Id",
{| Id = rnd.Next () |}
)

readResults.[i] <- world

// Update random numbers functionally
let updatedData =
readResults
|> Array.map (fun data -> { data with randomNumber = rnd.Next () })

// Build bulk update parameters functionally
// We use a single UPDATE statement with a VALUES clause to update all rows at once.
//
// Example SQL for 2 rows:
// UPDATE world SET randomnumber = temp.randomnumber
// FROM (VALUES (@Id_0, @Rn_0), (@Id_1, @Rn_1) ORDER BY 1) AS temp(id, randomnumber)
// WHERE temp.id = world.id
let updateParams =
updatedData
|> Array.mapi (fun i data -> [
sprintf "@Id_%d" i, box data.id // Parameter for the id
sprintf "@Rn_%d" i, box data.randomNumber // Parameter for the new random number
])
|> Array.collect List.toArray // Flatten the list of parameter pairs
|> dict // Convert to dictionary for Dapper

// Execute bulk update using Dapper
let sql = BatchUpdateSql.get queryParam
let! _ = conn.ExecuteAsync (sql, updateParams)

return! ctx.WriteJsonAsync updatedData
}

let endpoints: Endpoint list = [
route "/plaintext" (text "Hello, World!")
route "/json" (json {| message = "Hello, World!" |})
route "/db" db
route "/queries" queries
route "/fortunes" fortunes
route "/updates" updates
route "/plaintext" (text "Hello, World!")
]


@@ -173,6 +276,7 @@ module Main =
open Microsoft.Extensions.Logging
open System.Text.Json
open Newtonsoft.Json
open Npgsql

[<EntryPoint>]
let main args =
@@ -202,6 +306,7 @@
builder.Services
.AddSingleton(jsonSerializer)
.AddSingleton<RandomUtil>(rnd)
.AddSingleton<NpgsqlDataSource>(NpgsqlDataSource.Create (ConnectionString))
.AddGiraffe ()
|> ignore
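
For reference, here is a minimal sketch of the statement BatchUpdateSql.get produces and a parameter dictionary shaped like the one the updates handler builds, shown for queries=2. The names exampleSql and exampleParams, the exact whitespace, and the id/randomnumber values are invented for illustration; conn stands for an open connection as in the handler.

// Illustrative only: what BatchUpdateSql.get 2 expands to, plus a matching
// parameter dictionary like the one built in the updates handler.
let exampleSql =
    """
    UPDATE world
    SET randomnumber = temp.randomnumber
    FROM (VALUES (@Id_0, @Rn_0), (@Id_1, @Rn_1) ORDER BY 1)
    AS temp(id, randomnumber)
    WHERE temp.id = world.id
    """

let exampleParams =
    dict [
        "@Id_0", box 4174   // id of the first row to update
        "@Rn_0", box 8291   // its new randomnumber
        "@Id_1", box 9237   // id of the second row to update
        "@Rn_1", box 312    // its new randomnumber
    ]

// conn.ExecuteAsync (exampleSql, exampleParams) would then update both rows
// in a single round trip.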
