From 3349d2066390b00f10a5f94f4b7a157c95ba9c33 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sat, 24 Jan 2026 19:48:55 -0300 Subject: [PATCH 01/44] feat: body --- src/Std/Internal/Http/Data.lean | 1 + src/Std/Internal/Http/Data/Body.lean | 92 +++++ .../Internal/Http/Data/Body/ChunkStream.lean | 363 ++++++++++++++++++ src/Std/Internal/Http/Data/Body/Full.lean | 202 ++++++++++ src/Std/Internal/Http/Data/Body/Length.lean | 49 +++ src/Std/Internal/Http/Data/Chunk.lean | 10 +- .../Internal/Http/Internal/ChunkedBuffer.lean | 82 ++-- tests/lean/run/async_http_body.lean | 301 +++++++++++++++ 8 files changed, 1065 insertions(+), 35 deletions(-) create mode 100644 src/Std/Internal/Http/Data/Body.lean create mode 100644 src/Std/Internal/Http/Data/Body/ChunkStream.lean create mode 100644 src/Std/Internal/Http/Data/Body/Full.lean create mode 100644 src/Std/Internal/Http/Data/Body/Length.lean create mode 100644 tests/lean/run/async_http_body.lean diff --git a/src/Std/Internal/Http/Data.lean b/src/Std/Internal/Http/Data.lean index 653e05915346..9678d6d3ee54 100644 --- a/src/Std/Internal/Http/Data.lean +++ b/src/Std/Internal/Http/Data.lean @@ -14,6 +14,7 @@ public import Std.Internal.Http.Data.Status public import Std.Internal.Http.Data.Chunk public import Std.Internal.Http.Data.Headers public import Std.Internal.Http.Data.URI +public import Std.Internal.Http.Data.Body /-! # HTTP Data Types diff --git a/src/Std/Internal/Http/Data/Body.lean b/src/Std/Internal/Http/Data/Body.lean new file mode 100644 index 000000000000..30b8d5b63456 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body.lean @@ -0,0 +1,92 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. 
+Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async.ContextAsync +public import Std.Internal.Http.Data.Headers +public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Body.ChunkStream +public import Std.Internal.Http.Data.Body.Full + +public section + +/-! +# Body + +This module defines the `Body` typeclass, which provides a uniform interface for HTTP body types +including streaming and fully-buffered bodies. +-/ + +namespace Std.Http + +set_option linter.all true + +open Std Internal IO Async + +/-- +Typeclass that provides a uniform interface for HTTP body types. Implementations include +streaming bodies (`ByteStream`, `ChunkStream`) and fully-buffered bodies (`Full`). +-/ +class Body (α : Type) (β : outParam Type) where + /-- + Non-blocking receive. Returns `none` if the stream is closed or has ended, + `some` if data is available. + -/ + recv? : α → Async (Option β) + + /-- + Blocking receive. Blocks if no data is available yet. Returns `none` if the stream + is closed or has ended, `some` if data becomes available. If an amount is specified, + accumulates bytes up to that size before returning. + -/ + recv : α → Option UInt64 → Async (Option β) + + /-- + Send data to the body. May block if the buffer is full. + -/ + send : α → β → Async Unit + + /-- + Checks if the body is closed. + -/ + isClosed : α → Async Bool + + /-- + Returns the known size of the body if available. + -/ + size? : α → Async (Option Body.Length) + + /-- + Creates an empty body. + -/ + empty : Async α + + /-- + Creates a `Selector` for multiplexing receive operations. Resolves once data is available + and provides it, or returns `none` when the body is closed. + -/ + recvSelector : α → Selector (Option β) + +instance : Body Body.ChunkStream Chunk where + recv? := Body.ChunkStream.tryRecv + recv := Body.ChunkStream.recv + send := Body.ChunkStream.send + isClosed := Body.ChunkStream.isClosed + size? 
:= Body.ChunkStream.getKnownSize + empty := Body.ChunkStream.empty + recvSelector := Body.ChunkStream.recvSelector + +instance : Body Body.Full Chunk where + recv? full := do return (← Body.Full.recv? full).map Chunk.ofByteArray + recv full count := do return (← Body.Full.recv full count).map Chunk.ofByteArray + send full chunk := Body.Full.send full chunk.data + isClosed := Body.Full.isClosed + size? := Body.Full.size? + empty := Body.Full.empty + recvSelector := Body.Full.recvSelector + +end Std.Http diff --git a/src/Std/Internal/Http/Data/Body/ChunkStream.lean b/src/Std/Internal/Http/Data/Body/ChunkStream.lean new file mode 100644 index 000000000000..1d0eddf4024c --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/ChunkStream.lean @@ -0,0 +1,363 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Sync +public import Std.Internal.Async +public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Body.Length +public import Init.Data.Queue + +public section + +/-! +# ChunkStream + +A `ChunkStream` represents an asynchronous channel for streaming data in chunks. It provides an +interface for producers and consumers to exchange chunks with optional metadata (extensions), +making it suitable for HTTP chunked transfer encoding and other streaming scenarios. 
+-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +namespace ChunkStream + +open Internal.IO.Async in + +private inductive Consumer where + | normal (promise : IO.Promise (Option Chunk)) + | select (finished : Waiter (Option Chunk)) + +private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do + match c with + | .normal promise => + promise.resolve x + return true + | .select waiter => + let lose := return false + let win promise := do + promise.resolve (.ok x) + return true + waiter.race lose win + +private structure Producer where + chunk : Chunk + promise : IO.Promise Bool + +private structure State where + /-- + Chunks pushed into the stream that are waiting to be consumed. + -/ + values : Std.Queue Chunk + + /-- + Current number of chunks buffered in the stream. + -/ + amount : Nat + + /-- + Maximum number of chunks allowed in the buffer. Writers block when amount ≥ capacity. + -/ + capacity : Nat + + /-- + Consumers that are blocked on a producer providing them a chunk. They will be resolved to `none` + if the stream closes. + -/ + consumers : Std.Queue Consumer + + /-- + Producers that are blocked waiting for buffer space to become available. + -/ + producers : Std.Queue Producer + + /-- + Whether the stream is closed already. + -/ + closed : Bool + /-- + Known size of the stream if available. + -/ + knownSize : Option Body.Length +deriving Nonempty + +end ChunkStream + +/-- +A channel for chunks with support for chunk extensions. +-/ +structure ChunkStream where + private mk :: + private state : Mutex ChunkStream.State +deriving Nonempty + +namespace ChunkStream + +/-- +Creates a new ChunkStream with a specified capacity. 
+-/ +def emptyWithCapacity (capacity : Nat := 128) : Async ChunkStream := do + return { + state := ← Mutex.new { + values := ∅ + consumers := ∅ + producers := ∅ + amount := 0 + capacity + closed := false + knownSize := none + } + } + +/-- +Creates a new ChunkStream with default capacity. +-/ +@[always_inline, inline] +def empty : Async ChunkStream := + emptyWithCapacity + +private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length := + match knownSize with + | some (.fixed res) => some (Body.Length.fixed (res - chunk.size)) + | _ => knownSize + +private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : + AtomicT State m Unit := do + let st ← get + -- Try to wake a producer if we have space + if st.amount < st.capacity then + if let some (producer, producers) := st.producers.dequeue? then + let chunk := producer.chunk + if st.amount + 1 <= st.capacity then + set { st with + values := st.values.enqueue chunk, + amount := st.amount + 1, + producers + } + producer.promise.resolve true + else + set { st with producers := producers.enqueue producer } + +private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : + AtomicT State m (Option Chunk) := do + let st ← get + if let some (chunk, values) := st.values.dequeue? then + let newKnownSize := decreaseKnownSize st.knownSize chunk + let newAmount := st.amount - 1 + set { st with values, knownSize := newKnownSize, amount := newAmount } + tryWakeProducer + return some chunk + else + return none + +/-- +Attempts to receive a chunk from the stream. Returns `some` with a chunk when data is available, or `none` +when the stream is closed or no data is available. 
+-/ +def tryRecv (stream : ChunkStream) : Async (Option Chunk) := + stream.state.atomically do + tryRecv' + +private def recv' (stream : ChunkStream) : BaseIO (Task (Option Chunk)) := do + stream.state.atomically do + if let some chunk ← tryRecv' then + return .pure <| some chunk + else if (← get).closed then + return .pure none + else + let promise ← IO.Promise.new + modify fun st => { st with consumers := st.consumers.enqueue (.normal promise) } + return promise.result?.map (sync := true) (·.bind id) + +/-- +Receives a chunk from the stream. Blocks if no data is available yet. Returns `none` if the stream +is closed and no data is available. The amount parameter is ignored for chunk streams. +-/ +def recv (stream : ChunkStream) (_count : Option UInt64) : Async (Option Chunk) := do + Async.ofTask (← recv' stream) + +private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do + while true do + let st ← get + if let some (consumer, consumers) := st.consumers.dequeue? then + let newKnownSize := decreaseKnownSize st.knownSize chunk + let success ← consumer.resolve (some chunk) + set { st with consumers, knownSize := newKnownSize } + if success then + break + else + if st.amount + 1 <= st.capacity then + set { st with + values := st.values.enqueue chunk, + amount := st.amount + 1 + } + return true + else + return false + return true + +private def trySend (stream : ChunkStream) (chunk : Chunk) : BaseIO Bool := do + stream.state.atomically do + if (← get).closed then + return false + else + trySend' chunk + +private def send' (stream : ChunkStream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do + stream.state.atomically do + if (← get).closed then + return .pure <| .error (.userError "channel closed") + else if ← trySend' chunk then + return .pure <| .ok () + else + let promise ← IO.Promise.new + let producer : Producer := { chunk, promise } + modify fun st => { st with producers := st.producers.enqueue producer } + return promise.result?.map 
(sync := true) fun res => + if res.getD false then .ok () else .error (.userError "channel closed") + +/-- +Sends a chunk to the stream. Blocks if the buffer is full. +-/ +def send (stream : ChunkStream) (chunk : Chunk) : Async Unit := do + if chunk.data.isEmpty then + return + + let res : AsyncTask _ ← send' stream chunk + await res + +/-- +Gets the known size of the stream if available. Returns `none` if the size is not known. +-/ +@[always_inline, inline] +def getKnownSize (stream : ChunkStream) : Async (Option Body.Length) := do + stream.state.atomically do + return (← get).knownSize + +/-- +Sets the known size of the stream. Use this when the total expected size is known ahead of time. +-/ +@[always_inline, inline] +def setKnownSize (stream : ChunkStream) (size : Option Body.Length) : Async Unit := do + stream.state.atomically do + modify fun st => { st with knownSize := size } + +/-- +Closes the stream, preventing further sends and causing pending/future +recv operations to return `none` when no data is available. +-/ +def close (stream : ChunkStream) : Async Unit := do + stream.state.atomically do + let st ← get + if st.closed then return () + for consumer in st.consumers.toArray do + discard <| consumer.resolve none + for producer in st.producers.toArray do + producer.promise.resolve false + set { st with consumers := ∅, producers := ∅, closed := true } + +/-- +Checks if the stream is closed. +-/ +@[always_inline, inline] +def isClosed (stream : ChunkStream) : Async Bool := do + stream.state.atomically do + return (← get).closed + +@[inline] +private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : + AtomicT State m Bool := do + let st ← get + return !st.values.isEmpty || st.closed + +open Internal.IO.Async in + +/-- +Creates a `Selector` that resolves once the `ChunkStream` has data available and provides that data. 
+-/ +def recvSelector (stream : ChunkStream) : Selector (Option Chunk) where + tryFn := do + stream.state.atomically do + if ← recvReady' then + let val ← tryRecv' + return some val + else + return none + + registerFn waiter := do + stream.state.atomically do + if ← recvReady' then + let lose := return () + let win promise := do + promise.resolve (.ok (← tryRecv')) + + waiter.race lose win + else + modify fun st => { st with consumers := st.consumers.enqueue (.select waiter) } + + unregisterFn := do + stream.state.atomically do + let st ← get + let consumers ← st.consumers.filterM + fun + | .normal .. => return true + | .select waiter => return !(← waiter.checkFinished) + set { st with consumers } + +/-- +Iterate over the stream content in chunks, processing each chunk with the given step function. +-/ +@[inline] +protected partial def forIn + {β : Type} (stream : ChunkStream) (acc : β) + (step : Chunk → β → Async (ForInStep β)) : Async β := do + + let rec @[specialize] loop (stream : ChunkStream) (acc : β) : Async β := do + if let some chunk ← stream.recv none then + match ← step chunk acc with + | .done res => return res + | .yield res => loop stream res + else + return acc + + loop stream acc + +/-- +Iterate over the stream content in chunks, processing each chunk with the given step function. 
+-/ +@[inline] +protected partial def forIn' + {β : Type} (stream : ChunkStream) (acc : β) + (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do + + let rec @[specialize] loop (stream : ChunkStream) (acc : β) : ContextAsync β := do + let data ← Selectable.one #[ + .case (stream.recvSelector) pure, + .case (← ContextAsync.doneSelector) (fun _ => pure none), + ] + + if let some chunk := data then + match ← step chunk acc with + | .done res => return res + | .yield res => loop stream res + else + return acc + + loop stream acc + +instance : ForIn Async ChunkStream Chunk where + forIn := Std.Http.Body.ChunkStream.forIn + +instance : ForIn ContextAsync ChunkStream Chunk where + forIn := Std.Http.Body.ChunkStream.forIn' + +end ChunkStream + +end Std.Http.Body diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean new file mode 100644 index 000000000000..4a86a36446d8 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -0,0 +1,202 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Sync +public import Std.Internal.Async +public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Body.Length +public import Init.Data.ByteArray + +public section + +/-! +# Full + +A `Full` represents a fully-buffered HTTP body that contains data which can be consumed exactly once. +It wraps a `ByteArray` in a `Mutex`-protected `Option`, tracking whether the data has already been +consumed. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +Typeclass for types that can be converted to a `ByteArray`. 
+-/ +class ToByteArray (α : Type) where + + /-- + Transforms into a `ByteArray` + -/ + toByteArray : α → ByteArray + +instance : ToByteArray ByteArray where + toByteArray := id + +instance : ToByteArray String where + toByteArray := String.toUTF8 + +open Internal.IO.Async in + +private structure Full.State where + /-- + The stored data as ByteArray. `some` if not yet consumed, `none` if already consumed or empty. + -/ + data : Option ByteArray + + /-- + Whether the body has been closed. + -/ + closed : Bool + + /-- + Waiters registered via `recvSelector` waiting for data to become available. + -/ + waiters : Std.Queue (Waiter (Option Chunk)) +deriving Nonempty + +/-- +A fully-buffered body that stores data as a `ByteArray`. The data can be consumed exactly once +via `recv`. After consumption, subsequent `recv` calls return `none`. +-/ +structure Full where + private mk :: + private state : Mutex Full.State +deriving Nonempty + +namespace Full + +/-- +Creates a new `Full` body containing the given data converted to `ByteArray`. +-/ +def ofData [ToByteArray β] (data : β) : Async Full := do + return { state := ← Mutex.new { data := some (ToByteArray.toByteArray data), closed := false, waiters := ∅ } } + +/-- +Creates an empty `Full` body with no data. +-/ +def empty : Async Full := do + return { state := ← Mutex.new { data := none, closed := true, waiters := ∅ } } + +/-- +Non-blocking receive. Returns the stored `ByteArray` if available and not yet consumed, +or `none` if the body is empty or already consumed. +-/ +def recv? 
(full : Full) : Async (Option ByteArray) := do
  full.state.atomically do
    let st ← get
    match st.data with
    | some data =>
      -- Resolve any pending waiters with none (data consumed by this call)
      for waiter in st.waiters.toArray do
        let lose := return ()
        let win promise := promise.resolve (.ok none)
        waiter.race lose win
      set { st with data := none, closed := true, waiters := ∅ }
      return some data
    | none =>
      return none

/--
Blocking receive. Since `Full` bodies are already fully buffered, this behaves the same as `recv?`.
Returns the stored `ByteArray` if available, or `none` if consumed or empty.
The amount parameter is ignored for fully-buffered bodies.
-/
def recv (full : Full) (_count : Option UInt64) : Async (Option ByteArray) :=
  full.recv?

/--
Attempts to hand `data` directly to a waiter registered via `recvSelector`, retrying with the
next waiter when one has already finished its race. Returns `true` if a waiter accepted the
data, `false` if no live waiter remains.
-/
private partial def tryWakeWaiter [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m]
    (data : ByteArray) : AtomicT State m Bool := do
  match (← get).waiters.dequeue? with
  | none => return false
  | some (waiter, waiters) =>
    modify fun st => { st with waiters }
    let lose := return false
    let win promise := do
      promise.resolve (.ok (some (Chunk.ofByteArray data)))
      return true
    let success ← waiter.race lose win
    if success then return true
    else tryWakeWaiter data

/--
Sends data to the body, replacing any previously stored data.
-/
def send (full : Full) (data : ByteArray) : Async Unit := do
  full.state.atomically do
    let success ← tryWakeWaiter data
    if !success then
      modify fun st => { st with data := some data, closed := false }

/--
Checks if the body is closed (consumed or empty).
-/
def isClosed (full : Full) : Async Bool := do
  full.state.atomically do
    return (← get).closed

/--
Returns the known size of the body if data is available.
-/
def size?
(full : Full) : Async (Option Body.Length) := do + full.state.atomically do + let st ← get + match st.data with + | some data => return some (.fixed data.size) + | none => return none + +open Internal.IO.Async in + +/-- +Creates a `Selector` that resolves once the `Full` body has data available and provides that +data as a `Chunk`. Returns `none` when the body is closed. +-/ +def recvSelector (full : Full) : Selector (Option Chunk) where + tryFn := do + full.state.atomically do + let st ← get + match st.data with + | some data => + set { st with data := none, closed := true } + return some (some (Chunk.ofByteArray data)) + | none => + if st.closed then return some none + else return none + + registerFn waiter := do + full.state.atomically do + let st ← get + match st.data with + | some data => + let lose := return () + let win promise := do + promise.resolve (.ok (some (Chunk.ofByteArray data))) + set { (← get) with data := none, closed := true } + waiter.race lose win + | none => + if st.closed then + let lose := return () + let win promise := promise.resolve (.ok none) + waiter.race lose win + else + modify fun st => { st with waiters := st.waiters.enqueue waiter } + + unregisterFn := do + full.state.atomically do + let st ← get + let waiters ← st.waiters.filterM fun waiter => return !(← waiter.checkFinished) + set { st with waiters } + +end Full + +end Std.Http.Body diff --git a/src/Std/Internal/Http/Data/Body/Length.lean b/src/Std/Internal/Http/Data/Body/Length.lean new file mode 100644 index 000000000000..9996f7f40427 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Length.lean @@ -0,0 +1,49 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Init.Data.Repr + +public section + +/-! 
+# Length + +This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding +of an HTTP request or response. +-/ + +namespace Std.Http.Body + +set_option linter.all true + +/-- +Size of the body of a response or request. +-/ +inductive Length + /-- + Indicates that the HTTP message body uses **chunked transfer encoding**. + -/ + | chunked + + /-- + Indicates that the HTTP message body has a **fixed, known length**, as specified by the + `Content-Length` header. + -/ + | fixed (n : Nat) +deriving Repr, BEq + +namespace Length + +/-- +Checks if the `Length` is chunked. +-/ +def isChunked : Length → Bool + | .chunked => true + | _ => false + +end Std.Http.Body.Length diff --git a/src/Std/Internal/Http/Data/Chunk.lean b/src/Std/Internal/Http/Data/Chunk.lean index 3d8a5bddb892..f5ee6a2be2b8 100644 --- a/src/Std/Internal/Http/Data/Chunk.lean +++ b/src/Std/Internal/Http/Data/Chunk.lean @@ -114,10 +114,12 @@ def header (trailer : Trailer) (key : String) (value : String) : Trailer := instance : Encode .v11 Trailer where encode buffer trailer := - let terminalChunk := "0\r\n".toUTF8 - let trailerFields := trailer.headers.fold (init := ByteArray.empty) fun acc key values => + let buffer := buffer.write "0\r\n".toUTF8 + + let buffer := trailer.headers.fold (init := buffer) fun acc key values => values.foldl (init := acc) fun acc value => - acc ++ (key ++ ": " ++ value ++ "\r\n").toUTF8 - buffer.append #[terminalChunk, trailerFields, "\r\n".toUTF8] + acc.write (key ++ ": " ++ value ++ "\r\n").toUTF8 + + buffer.write "\r\n".toUTF8 end Trailer diff --git a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean index 5df9cd48e098..75ba04747303 100644 --- a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean +++ b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean @@ -9,6 +9,7 @@ prelude import Init.Data.Array.Lemmas public import Init.Data.String public import Init.Data.ByteArray +public import 
Init.Data.Queue public section @@ -32,20 +33,13 @@ structure ChunkedBuffer where /-- The accumulated byte arrays -/ - data : Array ByteArray + data : Queue ByteArray /-- The total size in bytes of all accumulated arrays -/ size : Nat - /-- - `size` is the total size of all accumulated arrays - -/ - size_eq : (data.map (·.size)).sum = size := by simp - -attribute [simp] ChunkedBuffer.size_eq - namespace ChunkedBuffer /-- @@ -53,14 +47,14 @@ An empty `ChunkedBuffer`. -/ @[inline] def empty : ChunkedBuffer := - { data := #[], size := 0 } + { data := .empty, size := 0 } /-- Append a single `ByteArray` to the `ChunkedBuffer`. -/ @[inline] def push (c : ChunkedBuffer) (b : ByteArray) : ChunkedBuffer := - { data := c.data.push b, size := c.size + b.size, size_eq := by simp [← Array.append_singleton] } + { data := c.data.enqueue b, size := c.size + b.size } /-- Writes a `ByteArray` to the `ChunkedBuffer`. @@ -83,36 +77,71 @@ Writes a `String` to the `ChunkedBuffer`. def writeString (buffer : ChunkedBuffer) (data : String) : ChunkedBuffer := buffer.push data.toUTF8 -/-- -Append many ByteArrays at once. --/ -@[inline] -def append (c : ChunkedBuffer) (d : ChunkedBuffer) : ChunkedBuffer := - { data := c.data ++ d.data, size := c.size + d.size } - /-- Turn the combined structure into a single contiguous ByteArray. -/ @[inline] def toByteArray (c : ChunkedBuffer) : ByteArray := - if h : 1 = c.data.size then - c.data[0]'(Nat.le_of_eq h) + let c := c.data.toArray + + if h : 1 = c.size then + c[0]'(Nat.le_of_eq h) else - c.data.foldl (· ++ ·) (.emptyWithCapacity c.size) + c.foldl (· ++ ·) (.emptyWithCapacity c.size) /-- Build from a ByteArray directly. -/ @[inline] def ofByteArray (bs : ByteArray) : ChunkedBuffer := - { data := #[bs], size := bs.size } + { data := .empty |>.enqueue bs, size := bs.size } /-- Build from an array of ByteArrays directly. 
-/ @[inline] def ofArray (bs : Array ByteArray) : ChunkedBuffer := - { data := bs, size := bs.foldr (·.size + ·) 0, size_eq := by simp [Array.sum, Array.foldr_map'] } + { data := .empty |>.enqueueAll bs.toList , size := bs.foldl (· + ·.size) 0 } + +/-- +Dequeue the first `ByteArray` from the `ChunkedBuffer`, returning it along with the remaining buffer. +Returns `none` if the buffer is empty. +-/ +@[inline] +def dequeue? (c : ChunkedBuffer) : Option (ByteArray × ChunkedBuffer) := + match c.data.dequeue? with + | some (b, rest) => some (b, { data := rest, size := c.size - b.size }) + | none => none + +/-- +Push a `ByteArray` to the front of the `ChunkedBuffer`, so it will be dequeued first. +-/ +@[inline] +def pushFront (c : ChunkedBuffer) (b : ByteArray) : ChunkedBuffer := + { data := { c.data with dList := b :: c.data.dList }, size := c.size + b.size } + +/-- +Extract exactly `n` bytes from the front of the `ChunkedBuffer`. If the buffer contains fewer +than `n` bytes, returns all available bytes. Returns the extracted bytes and the remaining buffer. +-/ +partial def take (c : ChunkedBuffer) (n : Nat) : ByteArray × ChunkedBuffer := + if n ≥ c.size then + (c.toByteArray, empty) + else if n == 0 then + (.empty, c) + else + go (.emptyWithCapacity n) n c +where + go (acc : ByteArray) (remaining : Nat) (buf : ChunkedBuffer) : ByteArray × ChunkedBuffer := + match buf.dequeue? with + | none => (acc, buf) + | some (chunk, rest) => + if chunk.size ≤ remaining then + go (acc ++ chunk) (remaining - chunk.size) rest + else + let taken := chunk.extract 0 remaining + let leftover := chunk.extract remaining chunk.size + (acc ++ taken, rest.pushFront leftover) /-- Check if it's an empty array. 
@@ -126,19 +155,10 @@ instance : Inhabited ChunkedBuffer := ⟨empty⟩ instance : EmptyCollection ChunkedBuffer where emptyCollection := empty -instance : HAppend ChunkedBuffer ChunkedBuffer ChunkedBuffer where - hAppend := append - instance : Coe ByteArray ChunkedBuffer where coe := ofByteArray instance : Coe (Array ByteArray) ChunkedBuffer where coe := ofArray -instance : Append ChunkedBuffer where - append := append - -instance : Repr ChunkedBuffer where - reprPrec bb _ := s!"ChunkedBuffer.ofArray {bb.data}" - end Std.Http.Internal.ChunkedBuffer diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean new file mode 100644 index 000000000000..1b55d563bee9 --- /dev/null +++ b/tests/lean/run/async_http_body.lean @@ -0,0 +1,301 @@ +import Std.Internal.Http.Data.Body + +open Std.Internal.IO Async +open Std.Http +open Std.Http.Body + +/-! ## ChunkStream tests -/ + +-- Test send followed by recv returns the chunk +def chunkSendRecv : Async Unit := do + let stream ← ChunkStream.empty + let chunk := Chunk.ofByteArray "hello".toUTF8 + stream.send chunk + let result ← stream.recv none + assert! result.isSome + assert! result.get!.data == "hello".toUTF8 + +#eval chunkSendRecv.block + +-- Test tryRecv on empty stream returns none +def chunkTryRecvEmpty : Async Unit := do + let stream ← ChunkStream.empty + let result ← stream.tryRecv + assert! result.isNone + +#eval chunkTryRecvEmpty.block + +-- Test tryRecv returns data when available +def chunkTryRecvWithData : Async Unit := do + let stream ← ChunkStream.empty + stream.send (Chunk.ofByteArray "data".toUTF8) + let result ← stream.tryRecv + assert! result.isSome + assert! result.get!.data == "data".toUTF8 + +#eval chunkTryRecvWithData.block + +-- Test close sets the closed flag +def chunkClose : Async Unit := do + let stream ← ChunkStream.empty + assert! !(← stream.isClosed) + stream.close + assert! 
(← stream.isClosed) + +#eval chunkClose.block + +-- Test recv on closed stream returns none +def chunkRecvAfterClose : Async Unit := do + let stream ← ChunkStream.empty + stream.close + let result ← stream.recv none + assert! result.isNone + +#eval chunkRecvAfterClose.block + +-- Test FIFO ordering of multiple chunks +def chunkMultipleFIFO : Async Unit := do + let stream ← ChunkStream.empty + stream.send (Chunk.ofByteArray "one".toUTF8) + stream.send (Chunk.ofByteArray "two".toUTF8) + stream.send (Chunk.ofByteArray "three".toUTF8) + let r1 ← stream.recv none + let r2 ← stream.recv none + let r3 ← stream.recv none + assert! r1.get!.data == "one".toUTF8 + assert! r2.get!.data == "two".toUTF8 + assert! r3.get!.data == "three".toUTF8 + +#eval chunkMultipleFIFO.block + +-- Test for-in iteration collects all chunks until close +def chunkForIn : Async Unit := do + let stream ← ChunkStream.empty + stream.send (Chunk.ofByteArray "a".toUTF8) + stream.send (Chunk.ofByteArray "b".toUTF8) + stream.close + + let mut acc : ByteArray := .empty + for chunk in stream do + acc := acc ++ chunk.data + assert! acc == "ab".toUTF8 + +#eval chunkForIn.block + +-- Test chunks preserve extensions +def chunkExtensions : Async Unit := do + let stream ← ChunkStream.empty + let chunk := { data := "hello".toUTF8, extensions := #[("key", some "value")] : Chunk } + stream.send chunk + let result ← stream.recv none + assert! result.isSome + assert! result.get!.extensions.size == 1 + assert! result.get!.extensions[0]! == ("key", some "value") + +#eval chunkExtensions.block + +-- Test set/get known size +def chunkKnownSize : Async Unit := do + let stream ← ChunkStream.empty + stream.setKnownSize (some (.fixed 100)) + let size ← stream.getKnownSize + assert! 
size == some (.fixed 100) + +#eval chunkKnownSize.block + +-- Test capacity: filling up to capacity succeeds via tryRecv check +def chunkCapacityFull : Async Unit := do + let stream ← ChunkStream.emptyWithCapacity (capacity := 3) + stream.send (Chunk.ofByteArray "a".toUTF8) + stream.send (Chunk.ofByteArray "b".toUTF8) + stream.send (Chunk.ofByteArray "c".toUTF8) + -- All three should be buffered + let r1 ← stream.tryRecv + let r2 ← stream.tryRecv + let r3 ← stream.tryRecv + let r4 ← stream.tryRecv + assert! r1.get!.data == "a".toUTF8 + assert! r2.get!.data == "b".toUTF8 + assert! r3.get!.data == "c".toUTF8 + assert! r4.isNone + +#eval chunkCapacityFull.block + +-- Test capacity: send blocks when buffer is full and resumes after recv +def chunkCapacityBackpressure : Async Unit := do + let stream ← ChunkStream.emptyWithCapacity (capacity := 2) + stream.send (Chunk.ofByteArray "a".toUTF8) + stream.send (Chunk.ofByteArray "b".toUTF8) + + -- Spawn a send that should block because capacity is 2 + let sendTask ← async (t := AsyncTask) <| + stream.send (Chunk.ofByteArray "c".toUTF8) + + -- Consume one to free space + let r1 ← stream.recv none + assert! r1.get!.data == "a".toUTF8 + + -- Wait for the blocked send to complete + sendTask.block + + -- Now we should be able to recv the remaining two + let r2 ← stream.recv none + let r3 ← stream.recv none + assert! r2.get!.data == "b".toUTF8 + assert! r3.get!.data == "c".toUTF8 + +#eval chunkCapacityBackpressure.block + +-- Test capacity 1: only one chunk at a time +def chunkCapacityOne : Async Unit := do + let stream ← ChunkStream.emptyWithCapacity (capacity := 1) + stream.send (Chunk.ofByteArray "first".toUTF8) + + let sendTask ← async (t := AsyncTask) <| + stream.send (Chunk.ofByteArray "second".toUTF8) + + let r1 ← stream.recv none + assert! r1.get!.data == "first".toUTF8 + + sendTask.block + + let r2 ← stream.recv none + assert! 
r2.get!.data == "second".toUTF8 + +#eval chunkCapacityOne.block + +-- Test close unblocks pending producers +def chunkCloseUnblocksProducers : Async Unit := do + let stream ← ChunkStream.emptyWithCapacity (capacity := 1) + stream.send (Chunk.ofByteArray "fill".toUTF8) + + -- This send should block because buffer is full + let sendTask ← async (t := AsyncTask) <| + try + stream.send (Chunk.ofByteArray "blocked".toUTF8) + catch _ => + pure () + + -- Close should unblock the producer (send gets error internally) + stream.close + + await sendTask + +#eval chunkCloseUnblocksProducers.block + + +/-! ## Full tests -/ + +-- Test ofData followed by recv +def fullOfData : Async Unit := do + let full ← Full.ofData "hello".toUTF8 + let result ← full.recv none + assert! result.isSome + assert! result.get! == "hello".toUTF8 + +#eval fullOfData.block + +-- Test data is consumed exactly once +def fullConsumedOnce : Async Unit := do + let full ← Full.ofData "data".toUTF8 + let r1 ← full.recv none + let r2 ← full.recv none + assert! r1.isSome + assert! r2.isNone + +#eval fullConsumedOnce.block + +-- Test empty Full returns none immediately +def fullEmpty : Async Unit := do + let full ← Full.empty + let result ← full.recv none + assert! result.isNone + +#eval fullEmpty.block + +-- Test isClosed transitions after consumption +def fullClosedAfterConsume : Async Unit := do + let full ← Full.ofData "data".toUTF8 + assert! !(← full.isClosed) + discard <| full.recv none + assert! (← full.isClosed) + +#eval fullClosedAfterConsume.block + +-- Test empty Full is already closed +def fullEmptyIsClosed : Async Unit := do + let full ← Full.empty + assert! (← full.isClosed) + +#eval fullEmptyIsClosed.block + +-- Test size? returns byte count +def fullSize : Async Unit := do + let full ← Full.ofData "hello".toUTF8 + let size ← full.size? + assert! size == some (.fixed 5) + +#eval fullSize.block + +-- Test size? 
returns none after consumption +def fullSizeAfterConsume : Async Unit := do + let full ← Full.ofData "hello".toUTF8 + discard <| full.recv none + let size ← full.size? + assert! size == none + +#eval fullSizeAfterConsume.block + +-- Test send replaces data +def fullSendReplacesData : Async Unit := do + let full ← Full.empty + full.send "new data".toUTF8 + assert! !(← full.isClosed) + let result ← full.recv none + assert! result.isSome + assert! result.get! == "new data".toUTF8 + +#eval fullSendReplacesData.block + +-- Test recv? behaves the same as recv +def fullRecvQuestion : Async Unit := do + let full ← Full.ofData "test".toUTF8 + let r1 ← full.recv? + assert! r1.isSome + assert! r1.get! == "test".toUTF8 + let r2 ← full.recv? + assert! r2.isNone + +#eval fullRecvQuestion.block + +-- Test Full from String type +def fullFromString : Async Unit := do + let full ← Full.ofData (β := String) "hello world" + let result ← full.recv none + assert! result.isSome + assert! result.get! == "hello world".toUTF8 + +#eval fullFromString.block + +/-! ## Body typeclass tests -/ + +-- Test Body instance for ChunkStream +def bodyChunkStream : Async Unit := do + let stream : ChunkStream ← Body.empty + Body.send stream (Chunk.ofByteArray "hello".toUTF8) + let result ← Body.recv stream none + assert! result.isSome + assert! result.get!.data == "hello".toUTF8 + assert! !(← Body.isClosed stream) + +#eval bodyChunkStream.block + +-- Test Body instance for Full +def bodyFull : Async Unit := do + let full ← Full.ofData "hello".toUTF8 + let result ← @Body.recv Body.Full Chunk _ full none + assert! result.isSome + assert! result.get!.data == "hello".toUTF8 + assert! 
(← @Body.isClosed Body.Full Chunk _ full) + +#eval bodyFull.block From 18bc715bad7fc5f566f252893dc2f9648e929021 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sat, 24 Jan 2026 20:43:59 -0300 Subject: [PATCH 02/44] feat: remove useless functions --- .../Internal/Http/Internal/ChunkedBuffer.lean | 39 ++++--------------- 1 file changed, 8 insertions(+), 31 deletions(-) diff --git a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean index 75ba04747303..71369cd0add5 100644 --- a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean +++ b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean @@ -63,6 +63,13 @@ Writes a `ByteArray` to the `ChunkedBuffer`. def write (buffer : ChunkedBuffer) (data : ByteArray) : ChunkedBuffer := buffer.push data +/-- +Writes a `ChunkedBuffer` to the `ChunkedBuffer`. +-/ +@[inline] +def append (buffer : ChunkedBuffer) (data : ChunkedBuffer) : ChunkedBuffer := + { data := buffer.data.enqueueAll data.data.toArray.toList.reverse, size := buffer.size + data.size } + /-- Writes a `Char` to the `ChunkedBuffer`. -/ @@ -101,7 +108,7 @@ Build from an array of ByteArrays directly. -/ @[inline] def ofArray (bs : Array ByteArray) : ChunkedBuffer := - { data := .empty |>.enqueueAll bs.toList , size := bs.foldl (· + ·.size) 0 } + { data := .empty |>.enqueueAll bs.reverse.toList , size := bs.foldl (· + ·.size) 0 } /-- Dequeue the first `ByteArray` from the `ChunkedBuffer`, returning it along with the remaining buffer. @@ -113,36 +120,6 @@ def dequeue? (c : ChunkedBuffer) : Option (ByteArray × ChunkedBuffer) := | some (b, rest) => some (b, { data := rest, size := c.size - b.size }) | none => none -/-- -Push a `ByteArray` to the front of the `ChunkedBuffer`, so it will be dequeued first. 
--/ -@[inline] -def pushFront (c : ChunkedBuffer) (b : ByteArray) : ChunkedBuffer := - { data := { c.data with dList := b :: c.data.dList }, size := c.size + b.size } - -/-- -Extract exactly `n` bytes from the front of the `ChunkedBuffer`. If the buffer contains fewer -than `n` bytes, returns all available bytes. Returns the extracted bytes and the remaining buffer. --/ -partial def take (c : ChunkedBuffer) (n : Nat) : ByteArray × ChunkedBuffer := - if n ≥ c.size then - (c.toByteArray, empty) - else if n == 0 then - (.empty, c) - else - go (.emptyWithCapacity n) n c -where - go (acc : ByteArray) (remaining : Nat) (buf : ChunkedBuffer) : ByteArray × ChunkedBuffer := - match buf.dequeue? with - | none => (acc, buf) - | some (chunk, rest) => - if chunk.size ≤ remaining then - go (acc ++ chunk) (remaining - chunk.size) rest - else - let taken := chunk.extract 0 remaining - let leftover := chunk.extract remaining chunk.size - (acc ++ taken, rest.pushFront leftover) - /-- Check if it's an empty array. 
-/ From 3c41d3961ed5986c466c35481276227f535431e6 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sun, 25 Jan 2026 01:48:27 -0300 Subject: [PATCH 03/44] feat: empty body and constructors --- src/Std/Internal/Http/Data/Body.lean | 18 ++ .../Internal/Http/Data/Body/ChunkStream.lean | 4 +- src/Std/Internal/Http/Data/Body/Empty.lean | 121 +++++++++++ src/Std/Internal/Http/Data/Body/Full.lean | 193 ++++++++++++------ src/Std/Internal/Http/Data/Request.lean | 9 - src/Std/Internal/Http/Data/Response.lean | 8 - tests/lean/run/async_http_body.lean | 166 ++++++++++++++- 7 files changed, 433 insertions(+), 86 deletions(-) create mode 100644 src/Std/Internal/Http/Data/Body/Empty.lean diff --git a/src/Std/Internal/Http/Data/Body.lean b/src/Std/Internal/Http/Data/Body.lean index 30b8d5b63456..327356d61301 100644 --- a/src/Std/Internal/Http/Data/Body.lean +++ b/src/Std/Internal/Http/Data/Body.lean @@ -11,6 +11,7 @@ public import Std.Internal.Http.Data.Headers public import Std.Internal.Http.Data.Body.Length public import Std.Internal.Http.Data.Body.ChunkStream public import Std.Internal.Http.Data.Body.Full +public import Std.Internal.Http.Data.Body.Empty public section @@ -71,6 +72,11 @@ class Body (α : Type) (β : outParam Type) where -/ recvSelector : α → Selector (Option β) + /-- + Closes the stream + -/ + close : α → Async Unit + instance : Body Body.ChunkStream Chunk where recv? := Body.ChunkStream.tryRecv recv := Body.ChunkStream.recv @@ -79,6 +85,7 @@ instance : Body Body.ChunkStream Chunk where size? := Body.ChunkStream.getKnownSize empty := Body.ChunkStream.empty recvSelector := Body.ChunkStream.recvSelector + close := Body.ChunkStream.close instance : Body Body.Full Chunk where recv? full := do return (← Body.Full.recv? full).map Chunk.ofByteArray @@ -88,5 +95,16 @@ instance : Body Body.Full Chunk where size? := Body.Full.size? empty := Body.Full.empty recvSelector := Body.Full.recvSelector + close := Body.Full.close + +instance : Body Body.Empty Chunk where + recv? 
empty := do return (← Body.Empty.recv? empty).map Chunk.ofByteArray + recv empty count := do return (← Body.Empty.recv empty count).map Chunk.ofByteArray + send empty chunk := Body.Empty.send empty chunk.data + isClosed := Body.Empty.isClosed + size? := Body.Empty.size? + empty := Body.Empty.new + recvSelector := Body.Empty.recvSelector + close := Body.Empty.close end Std.Http diff --git a/src/Std/Internal/Http/Data/Body/ChunkStream.lean b/src/Std/Internal/Http/Data/Body/ChunkStream.lean index 1d0eddf4024c..2c31586fcde6 100644 --- a/src/Std/Internal/Http/Data/Body/ChunkStream.lean +++ b/src/Std/Internal/Http/Data/Body/ChunkStream.lean @@ -358,6 +358,4 @@ instance : ForIn Async ChunkStream Chunk where instance : ForIn ContextAsync ChunkStream Chunk where forIn := Std.Http.Body.ChunkStream.forIn' -end ChunkStream - -end Std.Http.Body +end Std.Http.Body.ChunkStream diff --git a/src/Std/Internal/Http/Data/Body/Empty.lean b/src/Std/Internal/Http/Data/Body/Empty.lean new file mode 100644 index 000000000000..e341f6f629eb --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Empty.lean @@ -0,0 +1,121 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async +public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Request +public import Std.Internal.Http.Data.Response +public import Std.Internal.Http.Data.Body.Length + +public section + +/-! +# Empty + +An `Empty` represents an HTTP body with no content. This is useful for requests and responses +that do not carry a body, such as GET requests or 204 No Content responses. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +An empty HTTP body type that contains no data. +-/ +structure Empty where + private mk :: +deriving Inhabited, Nonempty + +namespace Empty + +/-- +The singleton empty body value. 
+-/
+def val : Empty := Empty.mk
+
+/--
+Creates a new empty body.
+-/
+def new : Async Empty :=
+  pure val
+
+/--
+Non-blocking receive. Always returns `none` since there is no data.
+-/
+def recv? (_ : Empty) : Async (Option ByteArray) :=
+  pure none
+
+/--
+Blocking receive. Always returns `none` since there is no data.
+-/
+def recv (_ : Empty) (_ : Option UInt64) : Async (Option ByteArray) :=
+  pure none
+
+/--
+Sending to an empty body is a no-op.
+-/
+def send (_ : Empty) (_ : ByteArray) : Async Unit :=
+  pure ()
+
+/--
+An empty body is always closed.
+-/
+def isClosed (_ : Empty) : Async Bool :=
+  pure true
+
+/--
+Returns `some (.fixed 0)`: an empty body always has a known length of zero bytes.
+-/
+def size? (_ : Empty) : Async (Option Body.Length) :=
+  pure (some (.fixed 0))
+
+/--
+Closing an empty body is a no-op.
+-/
+def close (_ : Empty) : Async Unit :=
+  pure ()
+
+/--
+Creates a `Selector` that immediately resolves to `none`.
+-/
+def recvSelector (_ : Empty) : Selector (Option Chunk) where
+  tryFn := pure (some none)
+  registerFn waiter := do
+    let lose := return ()
+    let win promise := promise.resolve (.ok none)
+    waiter.race lose win
+  unregisterFn := pure ()
+
+instance : EmptyCollection Empty where
+  emptyCollection := val
+
+end Empty
+
+end Std.Http.Body
+
+namespace Std.Http.Request.Builder
+
+/--
+Builds a request with an empty body.
+-/
+def blank (builder : Builder) : Request Body.Empty :=
+  { head := builder.head, body := Body.Empty.val }
+
+end Std.Http.Request.Builder
+
+namespace Std.Http.Response.Builder
+
+/--
+Builds a response with an empty body.
+-/ +def blank (builder : Builder) : Response Body.Empty := + { head := builder.head, body := Body.Empty.val } + +end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean index 4a86a36446d8..278a75f50190 100644 --- a/src/Std/Internal/Http/Data/Body/Full.lean +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -9,6 +9,8 @@ prelude public import Std.Sync public import Std.Internal.Async public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Request +public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Body.Length public import Init.Data.ByteArray @@ -55,11 +57,6 @@ private structure Full.State where Whether the body has been closed. -/ closed : Bool - - /-- - Waiters registered via `recvSelector` waiting for data to become available. - -/ - waiters : Std.Queue (Waiter (Option Chunk)) deriving Nonempty /-- @@ -76,14 +73,21 @@ namespace Full /-- Creates a new `Full` body containing the given data converted to `ByteArray`. -/ -def ofData [ToByteArray β] (data : β) : Async Full := do - return { state := ← Mutex.new { data := some (ToByteArray.toByteArray data), closed := false, waiters := ∅ } } +def new [ToByteArray β] (data : β) : Async Full := do + return { state := ← Mutex.new { data := some (ToByteArray.toByteArray data), closed := false } } /-- Creates an empty `Full` body with no data. -/ def empty : Async Full := do - return { state := ← Mutex.new { data := none, closed := true, waiters := ∅ } } + return { state := ← Mutex.new { data := none, closed := true } } + +/-- +Closes a `Full` +-/ +def close (full : Full) : Async Unit := do + full.state.atomically do + modify ({ · with closed := true }) /-- Non-blocking receive. Returns the stored `ByteArray` if available and not yet consumed, @@ -94,12 +98,7 @@ def recv? 
(full : Full) : Async (Option ByteArray) := do let st ← get match st.data with | some data => - -- Resolve any pending waiters with none (data consumed by this call) - for waiter in st.waiters.toArray do - let lose := return () - let win promise := promise.resolve (.ok none) - waiter.race lose win - set { st with data := none, closed := true, waiters := ∅ } + set { st with data := none, closed := true } return some data | none => return none @@ -112,31 +111,12 @@ The amount parameter is ignored for fully-buffered bodies. def recv (full : Full) (_count : Option UInt64) : Async (Option ByteArray) := full.recv? -/-- -Sends data to the body, replacing any previously stored data. --/ -private partial def tryWakeWaiter [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] - (data : ByteArray) : AtomicT State m Bool := do - match (← get).waiters.dequeue? with - | none => return false - | some (waiter, waiters) => - modify fun st => { st with waiters } - let lose := return false - let win promise := do - promise.resolve (.ok (some (Chunk.ofByteArray data))) - return true - let success ← waiter.race lose win - if success then return true - else tryWakeWaiter data - /-- Sends data to the body, replacing any previously stored data. -/ def send (full : Full) (data : ByteArray) : Async Unit := do full.state.atomically do - let success ← tryWakeWaiter data - if !success then - modify fun st => { st with data := some data, closed := false } + modify fun st => { st with data := some data, closed := false } /-- Checks if the body is closed (consumed or empty). 
@@ -174,29 +154,126 @@ def recvSelector (full : Full) : Selector (Option Chunk) where else return none registerFn waiter := do - full.state.atomically do - let st ← get - match st.data with - | some data => - let lose := return () - let win promise := do - promise.resolve (.ok (some (Chunk.ofByteArray data))) - set { (← get) with data := none, closed := true } - waiter.race lose win - | none => - if st.closed then - let lose := return () - let win promise := promise.resolve (.ok none) - waiter.race lose win - else - modify fun st => { st with waiters := st.waiters.enqueue waiter } - - unregisterFn := do - full.state.atomically do - let st ← get - let waiters ← st.waiters.filterM fun waiter => return !(← waiter.checkFinished) - set { st with waiters } + let lose := return () + let win promise := do + let r ← full.recv? + match r with + | some data => promise.resolve (.ok (some (Chunk.ofByteArray data))) + | none => promise.resolve (.ok none) + waiter.race lose win + + unregisterFn := pure () + +end Body.Full + +namespace Request.Builder +open Internal.IO.Async + +/-- +Builds a request with a text body. Sets Content-Type to text/plain and Content-Length automatically. +-/ +def text (builder : Builder) (content : String) : Async (Request Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do + let body ← Body.Full.new content + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! 
"application/octet-stream") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with a JSON body. Sets Content-Type to application/json and Content-Length automatically. +-/ +def json (builder : Builder) (content : String) : Async (Request Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with an HTML body. Sets Content-Type to text/html and Content-Length automatically. +-/ +def html (builder : Builder) (content : String) : Async (Request Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with an empty Full body. +-/ +def noBody (builder : Builder) : Async (Request Body.Full) := do + let body ← Body.Full.empty + return { head := builder.head, body } + +end Request.Builder + +namespace Response.Builder +open Internal.IO.Async + +/-- +Builds a response with a text body. Sets Content-Type to text/plain and Content-Length automatically. +-/ +def text (builder : Builder) (content : String) : Async (Response Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! 
(toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do + let body ← Body.Full.new content + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with a JSON body. Sets Content-Type to application/json and Content-Length automatically. +-/ +def json (builder : Builder) (content : String) : Async (Response Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with an HTML body. Sets Content-Type to text/html and Content-Length automatically. +-/ +def html (builder : Builder) (content : String) : Async (Response Body.Full) := do + let bytes := content.toUTF8 + let body ← Body.Full.new bytes + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with an empty Full body. 
+-/ +def noBody (builder : Builder) : Async (Response Body.Full) := do + let body ← Body.Full.empty + return { head := builder.head, body } -end Full +end Response.Builder -end Std.Http.Body +end Std.Http diff --git a/src/Std/Internal/Http/Data/Request.lean b/src/Std/Internal/Http/Data/Request.lean index d4db6a112bfd..9536771ede4f 100644 --- a/src/Std/Internal/Http/Data/Request.lean +++ b/src/Std/Internal/Http/Data/Request.lean @@ -75,15 +75,6 @@ structure Request.Builder where namespace Request -instance : ToString Head where - toString req := - toString req.method ++ " " ++ - toString req.uri ++ " " ++ - toString req.version ++ - "\r\n" ++ - toString req.headers ++ - "\r\n" - open Internal in instance : Encode .v11 Head where encode buffer req := diff --git a/src/Std/Internal/Http/Data/Response.lean b/src/Std/Internal/Http/Data/Response.lean index bdbb13e32be6..42a5a34206ec 100644 --- a/src/Std/Internal/Http/Data/Response.lean +++ b/src/Std/Internal/Http/Data/Response.lean @@ -74,14 +74,6 @@ structure Response.Builder where namespace Response -instance : ToString Head where - toString r := - toString r.version ++ " " ++ - toString r.status.toCode ++ " " ++ - toString r.status ++ "\r\n" ++ - toString r.headers ++ - "\r\n" - open Internal in instance : Encode .v11 Head where encode buffer r := diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 1b55d563bee9..b711c005532d 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -188,7 +188,7 @@ def chunkCloseUnblocksProducers : Async Unit := do -- Test ofData followed by recv def fullOfData : Async Unit := do - let full ← Full.ofData "hello".toUTF8 + let full ← Full.new "hello".toUTF8 let result ← full.recv none assert! result.isSome assert! result.get! 
== "hello".toUTF8 @@ -197,7 +197,7 @@ def fullOfData : Async Unit := do -- Test data is consumed exactly once def fullConsumedOnce : Async Unit := do - let full ← Full.ofData "data".toUTF8 + let full ← Full.new "data".toUTF8 let r1 ← full.recv none let r2 ← full.recv none assert! r1.isSome @@ -215,7 +215,7 @@ def fullEmpty : Async Unit := do -- Test isClosed transitions after consumption def fullClosedAfterConsume : Async Unit := do - let full ← Full.ofData "data".toUTF8 + let full ← Full.new "data".toUTF8 assert! !(← full.isClosed) discard <| full.recv none assert! (← full.isClosed) @@ -231,7 +231,7 @@ def fullEmptyIsClosed : Async Unit := do -- Test size? returns byte count def fullSize : Async Unit := do - let full ← Full.ofData "hello".toUTF8 + let full ← Full.new "hello".toUTF8 let size ← full.size? assert! size == some (.fixed 5) @@ -239,7 +239,7 @@ def fullSize : Async Unit := do -- Test size? returns none after consumption def fullSizeAfterConsume : Async Unit := do - let full ← Full.ofData "hello".toUTF8 + let full ← Full.new "hello".toUTF8 discard <| full.recv none let size ← full.size? assert! size == none @@ -259,7 +259,7 @@ def fullSendReplacesData : Async Unit := do -- Test recv? behaves the same as recv def fullRecvQuestion : Async Unit := do - let full ← Full.ofData "test".toUTF8 + let full ← Full.new "test".toUTF8 let r1 ← full.recv? assert! r1.isSome assert! r1.get! == "test".toUTF8 @@ -270,7 +270,7 @@ def fullRecvQuestion : Async Unit := do -- Test Full from String type def fullFromString : Async Unit := do - let full ← Full.ofData (β := String) "hello world" + let full ← Full.new (β := String) "hello world" let result ← full.recv none assert! result.isSome assert! result.get! 
== "hello world".toUTF8 @@ -292,10 +292,160 @@ def bodyChunkStream : Async Unit := do -- Test Body instance for Full def bodyFull : Async Unit := do - let full ← Full.ofData "hello".toUTF8 + let full ← Full.new "hello".toUTF8 let result ← @Body.recv Body.Full Chunk _ full none assert! result.isSome assert! result.get!.data == "hello".toUTF8 assert! (← @Body.isClosed Body.Full Chunk _ full) #eval bodyFull.block + +/-! ## Empty body tests -/ + +-- Test Empty body recv returns none +def emptyRecv : Async Unit := do + let empty ← Body.Empty.new + let result ← empty.recv none + assert! result.isNone + +#eval emptyRecv.block + +-- Test Empty body is always closed +def emptyIsClosed : Async Unit := do + let empty ← Body.Empty.new + assert! (← empty.isClosed) + +#eval emptyIsClosed.block + +-- Test Empty body size is 0 +def emptySize : Async Unit := do + let empty ← Body.Empty.new + let size ← empty.size? + assert! size == some (.fixed 0) + +#eval emptySize.block + +-- Test Body instance for Empty +def bodyEmpty : Async Unit := do + let empty : Body.Empty ← Body.empty + let result ← @Body.recv Body.Empty Chunk _ empty none + assert! result.isNone + assert! (← @Body.isClosed Body.Empty Chunk _ empty) + +#eval bodyEmpty.block + +/-! ## Request.Builder Full body tests -/ + +-- Test Request.Builder.text sets correct headers +def requestBuilderText : Async Unit := do + let req ← Request.post (.originForm! "/api") + |>.text "Hello, World!" + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") + assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") + let body ← req.body.recv? + assert! body.isSome + assert! body.get! == "Hello, World!".toUTF8 + +#eval requestBuilderText.block + +-- Test Request.Builder.json sets correct headers +def requestBuilderJson : Async Unit := do + let req ← Request.post (.originForm! "/api") + |>.json "{\"key\": \"value\"}" + assert! 
req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") + let body ← req.body.recv? + assert! body.isSome + assert! body.get! == "{\"key\": \"value\"}".toUTF8 + +#eval requestBuilderJson.block + +-- Test Request.Builder.bytes sets correct headers +def requestBuilderBytes : Async Unit := do + let data := ByteArray.mk #[0x01, 0x02, 0x03] + let req ← Request.post (.originForm! "/api") + |>.bytes data + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream") + assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3") + let body ← req.body.recv? + assert! body.isSome + assert! body.get! == data + +#eval requestBuilderBytes.block + +-- Test Request.Builder.html sets correct headers +def requestBuilderHtml : Async Unit := do + let req ← Request.post (.originForm! "/api") + |>.html "" + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8") + let body ← req.body.recv? + assert! body.isSome + +#eval requestBuilderHtml.block + +-- Test Request.Builder.noBody creates empty body +def requestBuilderNoBody : Async Unit := do + let req ← Request.get (.originForm! "/api") + |>.noBody + let body ← req.body.recv? + assert! body.isNone + +#eval requestBuilderNoBody.block + +/-! ## Response.Builder Full body tests -/ + +-- Test Response.Builder.text sets correct headers +def responseBuilderText : Async Unit := do + let res ← Response.ok + |>.text "Hello, World!" + assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") + assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") + let body ← res.body.recv? + assert! body.isSome + assert! body.get! 
== "Hello, World!".toUTF8 + +#eval responseBuilderText.block + +-- Test Response.Builder.json sets correct headers +def responseBuilderJson : Async Unit := do + let res ← Response.ok + |>.json "{\"status\": \"ok\"}" + assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") + let body ← res.body.recv? + assert! body.isSome + +#eval responseBuilderJson.block + +-- Test Response.Builder.noBody creates empty body +def responseBuilderNoBody : Async Unit := do + let res ← Response.ok + |>.noBody + let body ← res.body.recv? + assert! body.isNone + +#eval responseBuilderNoBody.block + +/-! ## ChunkStream builder tests -/ + +-- Test Request.Builder.textChunked +def requestBuilderTextChunked : Async Unit := do + let req ← Request.post (.originForm! "/api") + |>.textChunked "Hello, Chunked!" + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") + assert! req.head.headers.get? Header.Name.transferEncoding == some (Header.Value.ofString! "chunked") + let chunk ← req.body.recv none + assert! chunk.isSome + assert! chunk.get!.data == "Hello, Chunked!".toUTF8 + +#eval requestBuilderTextChunked.block + +-- Test Response.Builder.textChunked +def responseBuilderTextChunked : Async Unit := do + let res ← Response.ok + |>.textChunked "Hello, Chunked!" + assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") + assert! res.head.headers.get? Header.Name.transferEncoding == some (Header.Value.ofString! "chunked") + let chunk ← res.body.recv none + assert! chunk.isSome + assert! 
chunk.get!.data == "Hello, Chunked!".toUTF8 + +#eval responseBuilderTextChunked.block From f8ad249e427f134cfad87a4cb2f4755e17c0a0d0 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sun, 25 Jan 2026 12:40:41 -0300 Subject: [PATCH 04/44] test: wrong test --- tests/lean/run/async_http_body.lean | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index b711c005532d..ae7dd303e449 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -423,29 +423,3 @@ def responseBuilderNoBody : Async Unit := do assert! body.isNone #eval responseBuilderNoBody.block - -/-! ## ChunkStream builder tests -/ - --- Test Request.Builder.textChunked -def requestBuilderTextChunked : Async Unit := do - let req ← Request.post (.originForm! "/api") - |>.textChunked "Hello, Chunked!" - assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") - assert! req.head.headers.get? Header.Name.transferEncoding == some (Header.Value.ofString! "chunked") - let chunk ← req.body.recv none - assert! chunk.isSome - assert! chunk.get!.data == "Hello, Chunked!".toUTF8 - -#eval requestBuilderTextChunked.block - --- Test Response.Builder.textChunked -def responseBuilderTextChunked : Async Unit := do - let res ← Response.ok - |>.textChunked "Hello, Chunked!" - assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") - assert! res.head.headers.get? Header.Name.transferEncoding == some (Header.Value.ofString! "chunked") - let chunk ← res.body.recv none - assert! chunk.isSome - assert! 
chunk.get!.data == "Hello, Chunked!".toUTF8 - -#eval responseBuilderTextChunked.block From 53a6355074c5d9228028fd48d3ad5164b7c6a50c Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sat, 24 Jan 2026 20:58:34 -0300 Subject: [PATCH 05/44] feat: H1 protocol --- src/Std/Internal/Http/Protocol/H1.lean | 652 ++++++++++++++++++ src/Std/Internal/Http/Protocol/H1/Config.lean | 97 +++ src/Std/Internal/Http/Protocol/H1/Error.lean | 98 +++ src/Std/Internal/Http/Protocol/H1/Event.lean | 73 ++ .../Internal/Http/Protocol/H1/Message.lean | 130 ++++ src/Std/Internal/Http/Protocol/H1/Parser.lean | 314 +++++++++ src/Std/Internal/Http/Protocol/H1/Reader.lean | 269 ++++++++ src/Std/Internal/Http/Protocol/H1/Writer.lean | 265 +++++++ 8 files changed, 1898 insertions(+) create mode 100644 src/Std/Internal/Http/Protocol/H1.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Config.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Error.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Event.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Message.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Parser.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Reader.lean create mode 100644 src/Std/Internal/Http/Protocol/H1/Writer.lean diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean new file mode 100644 index 000000000000..0fd847955aa5 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -0,0 +1,652 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. 
+Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Time +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal +public import Std.Internal.Http.Protocol.H1.Parser +public import Std.Internal.Http.Protocol.H1.Config +public import Std.Internal.Http.Protocol.H1.Message +public import Std.Internal.Http.Protocol.H1.Reader +public import Std.Internal.Http.Protocol.H1.Writer +public import Std.Internal.Http.Protocol.H1.Event + +public section + +/-! +# HTTP/1.1 Protocol State Machine + +This module implements the core HTTP/1.1 protocol state machine that handles +parsing requests/responses and generating output. The machine is direction-aware, +supporting both server mode (receiving requests) and client mode (receiving responses). +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +open Std Internal Parsec ByteArray +open Internal + +/-- +Results from a single step of the state machine. +-/ +structure StepResult (dir : Direction) where + + /-- + Events that occurred during this step (e.g., headers received, data available, errors). + -/ + events : Array (Event dir) := #[] + + /-- + Output data ready to be sent to the socket. + -/ + output : ChunkedBuffer := .empty + +/-- +The HTTP 1.1 protocol state machine. +-/ +structure Machine (dir : Direction) where + + /-- + The state of the reader. + -/ + reader : Reader dir := {} + + /-- + The state of the writer. + -/ + writer : Writer dir := {} + + /-- + The configuration. + -/ + config : Config + + /-- + Events that happened during reading and writing. + -/ + events : Array (Event dir) := #[] + + /-- + Error thrown by the machine. + -/ + error : Option Error := none + + /-- + The timestamp for the `Date` header. + -/ + instant : Option (Std.Time.DateTime .UTC) := none + + /-- + If the connection will be kept alive after the message. + -/ + keepAlive : Bool := config.enableKeepAlive + + /-- + Whether a forced flush has been requested by the user. 
+ -/ + forcedFlush : Bool := false + + /-- + Host header. + -/ + host : Option Header.Value := none + +namespace Machine + +@[inline] +private def modifyWriter (machine : Machine dir) (fn : Writer dir → Writer dir) : Machine dir := + { machine with writer := fn machine.writer } + +@[inline] +private def modifyReader (machine : Machine dir) (fn : Reader dir → Reader dir) : Machine dir := + { machine with reader := fn machine.reader } + +@[inline] +private def setReaderState (machine : Machine dir) (state : Reader.State dir) : Machine dir := + machine.modifyReader ({ · with state }) + +@[inline] +private def setWriterState (machine : Machine dir) (state : Writer.State) : Machine dir := + machine.modifyWriter ({ · with state }) + +@[inline] +private def addEvent (machine : Machine dir) (event : Event dir) : Machine dir := + { machine with events := machine.events.push event } + +@[inline] +private def setEvent (machine : Machine dir) (event : Option (Event dir)) : Machine dir := + match event with + | some event => machine.addEvent event + | none => machine + +@[inline] +private def setError (machine : Machine dir) (error : Error) : Machine dir := + { machine with error := some error } + +@[inline] +private def disableKeepAlive (machine : Machine dir) : Machine dir := + { machine with keepAlive := false } + +@[inline] +private def setFailure (machine : Machine dir) (error : H1.Error) : Machine dir := + machine + |>.addEvent (.failed error) + |>.setReaderState (.failed error) + |>.setError error + +@[inline] +private def updateKeepAlive (machine : Machine dir) (should : Bool) : Machine dir := + { machine with keepAlive := machine.keepAlive ∧ should } + +-- Helper Functions + +private def isChunked (headers : Headers) : Option Bool := + if let some res := headers.get? 
Header.Name.transferEncoding then + let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) + if encodings.isEmpty then + none + else + let chunkedCount := encodings.filter (· == "chunked") |>.size + let lastIsChunked := encodings.back? == some "chunked" + + if chunkedCount > 1 then + none + else if chunkedCount = 1 ∧ ¬lastIsChunked then + none + else + some lastIsChunked + else + some false + +private def extractBodyLengthFromHeaders (headers : Headers) : Option Body.Length := + match (headers.get? Header.Name.contentLength, isChunked headers) with + | (some cl, some false) => cl.value.toNat? >>= (some ∘ Body.Length.fixed) + | (_, some true) => some Body.Length.chunked + | _ => none + +private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do + match dir with + | .receiving => guard (message.headers.get? Header.Name.host |>.isSome) + | .sending => pure () + + if let .receiving := dir then + if message.method == .head ∨ message.method == .connect then + return .fixed 0 + + message.getSize false + +-- State Checks + +/-- +Returns `true` if the reader is in a failed state. +-/ +@[inline] +def failed (machine : Machine dir) : Bool := + match machine.reader.state with + | .failed _ => true + | _ => false + +/-- +Returns `true` if the reader has completed successfully. +-/ +@[inline] +def isReaderComplete (machine : Machine dir) : Bool := + match machine.reader.state with + | .complete => true + | _ => false + +/-- +Returns `true` if the reader is closed. +-/ +@[inline] +def isReaderClosed (machine : Machine dir) : Bool := + match machine.reader.state with + | .closed => true + | _ => false + +/-- +Returns `true` if the machine should flush buffered output. 
+-/ +@[inline] +def shouldFlush (machine : Machine dir) : Bool := + machine.failed ∨ + machine.reader.state == .closed ∨ + machine.writer.isReadyToSend ∨ + machine.writer.knownSize.isSome + +/-- +Returns `true` if the writer is waiting for headers of a new message. +-/ +@[inline] +def isWaitingMessage (machine : Machine dir) : Bool := + machine.writer.state == .waitingHeaders ∧ + ¬machine.writer.sentMessage + +/-- +Returns `true` if both reader and writer are closed and no output remains. +-/ +@[inline] +def halted (machine : Machine dir) : Bool := + match machine.reader.state, machine.writer.state with + | .closed, .closed => machine.writer.outputData.isEmpty + | _, _ => false + +private def parseWith (machine : Machine dir) (parser : Parser α) (limit : Option Nat) + (expect : Option Nat := none) : Machine dir × Option α := + let remaining := machine.reader.input.remainingBytes + match parser machine.reader.input with + | .success buffer result => + ({ machine with reader := machine.reader.setInput buffer }, some result) + | .error it .eof => + let usedBytesUntilFailure := remaining - it.remainingBytes + if machine.reader.noMoreInput then + (machine.setFailure .connectionClosed, none) + else if let some limit := limit then + if usedBytesUntilFailure ≥ limit + then (machine.setFailure .badMessage, none) + else (machine.addEvent (.needMoreData expect), none) + else + (machine.addEvent (.needMoreData expect), none) + | .error _ _ => + (machine.setFailure .badMessage, none) + +-- Message Processing + +private def resetForNextMessage (machine : Machine ty) : Machine ty := + + if machine.keepAlive then + { machine with + reader := { + state := .needStartLine, + input := machine.reader.input, + messageHead := {}, + messageCount := machine.reader.messageCount + 1 + }, + writer := { + userData := .empty, + outputData := machine.writer.outputData, + state := .pending, + knownSize := none, + messageHead := {}, + userClosedBody := false, + sentMessage := false + }, + events 
:= machine.events.push .next, + error := none + } + else + machine.addEvent .close + |>.setWriterState .closed + |>.setReaderState .closed + +/- +This function processes the message we are receiving +-/ +private def processHeaders (machine : Machine dir) : Machine dir := + let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages) + + let shouldKeepAlive : Bool := machine.reader.messageHead.shouldKeepAlive + let machine := updateKeepAlive machine shouldKeepAlive + + match checkMessageHead machine.reader.messageHead with + | none => machine.setFailure .badMessage + | some size => + let size := match size with + | .fixed n => .needFixedBody n + | .chunked => .needChunkedSize + + let machine := machine.addEvent (.endHeaders machine.reader.messageHead) + + machine.setReaderState size + |>.setWriterState .waitingHeaders + |>.addEvent .needAnswer + +/-- +This processes the message we are sending. +-/ +def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : Machine dir := + let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages) + + let shouldKeepAlive := messageHead.shouldKeepAlive + let machine := machine.updateKeepAlive shouldKeepAlive + let size := Writer.determineTransferMode machine.writer + + let headers := + if messageHead.headers.contains .host then + messageHead.headers + else if let some host := machine.host then + messageHead.headers.insert .host host + else + messageHead.headers + + -- Add identity header based on direction + let headers := + let identityOpt := machine.config.identityHeader + match dir, identityOpt with + | .receiving, some server => headers.insert .server server + | .sending, some userAgent => headers.insert .userAgent userAgent + | _, none => headers + + -- Add Connection: close if needed + let headers := + if !machine.keepAlive ∧ !headers.hasEntry .connection .close then + headers.insert .connection .close + else + headers + + -- 
Add Content-Length or Transfer-Encoding if needed + let headers := + if !(headers.contains .contentLength ∨ headers.contains .transferEncoding) then + match size with + | .fixed n => headers.insert .contentLength (.ofString! <| toString n) + | .chunked => headers.insert .transferEncoding .chunked + else + headers + + let state := Writer.State.writingBody size + + let messageHead := + match dir, messageHead with + | .receiving, messageHead => toString { messageHead with headers } + | .sending, messageHead => toString { messageHead with headers } + + machine.modifyWriter (fun writer => { + writer with + outputData := writer.outputData.append messageHead.toUTF8, + state + }) + +/--Put some data inside the input of the machine. -/ +@[inline] +def feed (machine : Machine ty) (data : ByteArray) : Machine ty := + if machine.isReaderClosed then + machine + else + { machine with reader := machine.reader.feed data } + +/--Signal that reader is not going to receive any more messages. -/ +@[inline] +def closeReader (machine : Machine dir) : Machine dir := + machine.modifyReader ({ · with noMoreInput := true }) + +/--Signal that the writer cannot send more messages because the socket closed. -/ +@[inline] +def closeWriter (machine : Machine dir) : Machine dir := + machine.modifyWriter ({ · with state := .closed, userClosedBody := true }) + +/--Signal that the user is not sending data anymore. -/ +@[inline] +def userClosedBody (machine : Machine dir) : Machine dir := + machine.modifyWriter ({ · with userClosedBody := true }) + +/--Signal that the socket is not sending data anymore. -/ +@[inline] +def noMoreInput (machine : Machine dir) : Machine dir := + machine.modifyReader ({ · with noMoreInput := true }) + +/--Set a known size for the message body. -/ +@[inline] +def setKnownSize (machine : Machine dir) (size : Body.Length) : Machine dir := + machine.modifyWriter (fun w => { w with knownSize := w.knownSize.or (some size) }) + +/--Send the head of a message to the machine. 
-/ +@[inline] +def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir := + if machine.isWaitingMessage then + let machine := machine.modifyWriter ({ · with messageHead := message, sentMessage := true }) + + let machine := + if machine.writer.knownSize.isNone then + match message.getSize false with + | some size => machine.setKnownSize size + | none => machine + else + machine + + machine.setWriterState .waitingForFlush + else + machine + +/-- Send data to the socket. -/ +@[inline] +def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir := + if data.isEmpty then + machine + else + machine.modifyWriter (fun writer => { writer with userData := writer.userData ++ data }) + +/-- Get all the events of the machine. -/ +@[inline] +def takeEvents (machine : Machine dir) : Machine dir × Array (Event dir) := + ({ machine with events := #[] }, machine.events) + +/-- Take all the accumulated output to send to the socket. -/ +@[inline] +def takeOutput (machine : Machine dir) : Machine dir × ChunkedBuffer := + let output := machine.writer.outputData + ({ machine with writer := { machine.writer with outputData := .empty } }, output) + +/-- Process the writer part of the machine. 
-/ +partial def processWrite (machine : Machine dir) : Machine dir := + match machine.writer.state with + | .pending => + if machine.reader.isClosed then + machine.closeWriter + else + machine + | .waitingHeaders => + machine.addEvent .needAnswer + | .waitingForFlush => + if machine.shouldFlush then + machine.setHeaders machine.writer.messageHead + |> processWrite + else + machine + + | .writingHeaders => + machine.setWriterState (.writingBody (Writer.determineTransferMode machine.writer)) + |> processWrite + + | .writingBody (.fixed n) => + if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then + let (writer, remaining) := Writer.writeFixedBody machine.writer n + let machine := { machine with writer } + + if machine.writer.isReadyToSend ∨ remaining = 0 then + machine.setWriterState .complete |> processWrite + else + machine.setWriterState (.writingBody (.fixed remaining)) + else + machine + + | .writingBody .chunked => + if machine.writer.userClosedBody then + machine.modifyWriter Writer.writeFinalChunk + |>.setWriterState .complete + |> processWrite + else if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then + machine.modifyWriter Writer.writeChunkedBody + |> processWrite + else + machine + + | .shuttingDown => + if machine.writer.outputData.isEmpty then + machine.setWriterState .complete |> processWrite + else + machine + + | .complete => + if machine.isReaderComplete then + if machine.keepAlive then + resetForNextMessage machine + else + machine.setWriterState .closed + |>.addEvent .close + else if machine.isReaderClosed then + machine.setWriterState .closed + |>.addEvent .close + else + if machine.keepAlive then + machine + else + machine.setWriterState .closed + + | .closed => + machine + +/--Handle the failed state for the reader. 
-/ +private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Machine dir := + let machine : Machine dir := + match dir with + | .receiving => machine + |>.setWriterState .waitingHeaders + |>.disableKeepAlive + |>.send { status := .badRequest } |>.userClosedBody + | .sending => machine + + machine + |>.setReaderState .closed + |>.addEvent (.failed error) + |>.setError error + +/--Process the reader part of the machine. -/ +partial def processRead (machine : Machine dir) : Machine dir := + match machine.reader.state with + | .needStartLine => + if machine.reader.noMoreInput ∧ machine.reader.input.atEnd then + machine.setReaderState .closed + else if machine.reader.input.atEnd then + machine.addEvent (.needMoreData none) + else + let (machine, result) : Machine dir × Option (Message.Head dir) := + match dir with + | .receiving => parseWith machine (parseRequestLine machine.config) (limit := some 8192) + | .sending => parseWith machine (parseStatusLine machine.config) (limit := some 8192) + + if let some head := result then + if head.version != .v11 then + machine.setFailure .unsupportedVersion + else + machine + |>.modifyReader (.setMessageHead head) + |>.setReaderState (.needHeader 0) + |> processRead + else + machine + + | .needHeader headerCount => + let (machine, result) := parseWith machine + (parseSingleHeader machine.config) (limit := none) + + if headerCount > machine.config.maxHeaders then + machine |>.setFailure .badMessage + else + if let some result := result then + if let some (name, value) := result then + if let some (name, headerValue) := Prod.mk <$> Header.Name.ofString? name <*> Header.Value.ofString? 
value then + machine + |>.modifyReader (.addHeader name headerValue) + |>.setReaderState (.needHeader (headerCount + 1)) + |> processRead + else + machine.setFailure .badMessage + else + processHeaders machine + |> processRead + else + machine + + | .needChunkedSize => + let (machine, result) := parseWith machine (parseChunkSize machine.config) (limit := some 128) + + match result with + | some (size, ext) => + machine + |>.setReaderState (.needChunkedBody ext size) + |> processRead + | none => + machine + + | .needChunkedBody ext 0 => + let (machine, result) := parseWith machine (parseLastChunkBody machine.config) (limit := some 2) + + match result with + | some _ => + machine + |>.setReaderState .complete + |>.addEvent (.gotData true ext .empty) + |> processRead + | none => + machine + + | .needChunkedBody ext size => + let (machine, result) := parseWith machine + (parseChunkedSizedData size) (limit := none) (some size) + + if let some body := result then + match body with + | .complete body => + machine + |>.setReaderState .needChunkedSize + |>.addEvent (.gotData false ext body) + |> processRead + | .incomplete body remaining => + machine + |>.setReaderState (.needChunkedBody ext remaining) + |>.addEvent (.gotData false ext body) + else + machine + + | .needFixedBody 0 => + machine + |>.setReaderState .complete + |>.addEvent (.gotData true #[] .empty) + |> processRead + + | .needFixedBody size => + let (machine, result) := parseWith machine (parseFixedSizeData size) (limit := none) (some size) + + if let some body := result then + match body with + | .complete body => + machine + |>.setReaderState .complete + |>.addEvent (.gotData true #[] body) + |> processRead + | .incomplete body remaining => + machine + |>.setReaderState (.needFixedBody remaining) + |>.addEvent (.gotData false #[] body) + else + machine + + | .complete => + if (machine.reader.noMoreInput ∧ machine.reader.input.atEnd) ∨ ¬machine.keepAlive then + machine.setReaderState .closed + else + machine 
+ + | .closed => + machine + + | .failed error => + handleReaderFailed machine error + +/-- +Execute one step of the state machine. +-/ +def step (machine : Machine dir) : Machine dir × StepResult dir := + let machine := machine.processRead.processWrite + let (machine, events) := machine.takeEvents + let (machine, output) := machine.takeOutput + (machine, { events, output }) + +end Std.Http.Protocol.H1.Machine diff --git a/src/Std/Internal/Http/Protocol/H1/Config.lean b/src/Std/Internal/Http/Protocol/H1/Config.lean new file mode 100644 index 000000000000..310c5f8df066 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Config.lean @@ -0,0 +1,97 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal + +public section + +/-! +# HTTP/1.1 Configuration + +This module defines the configuration options for HTTP/1.1 protocol processing, +including connection limits, header constraints, and various size limits. +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +open Std Internal Parsec ByteArray +open Internal + +/-- +Connection limits configuration with validation. +-/ +structure Config where + /-- + Maximum number of messages per connection. + -/ + maxMessages : Nat := 100 + + /-- + Maximum number of headers allowed per message. + -/ + maxHeaders : Nat := 100 + + /-- + Whether to enable keep-alive connections by default. 
+ -/ + enableKeepAlive : Bool := true + + /-- + The server name (for sending responses) or user agent (for sending requests) + -/ + identityHeader : Option Header.Value := some (.new "LeanServer") + + /-- + Maximum length of HTTP method token (default: 16 bytes) + -/ + maxMethodLength : Nat := 16 + + /-- + Maximum length of request URI (default: 8192 bytes) + -/ + maxUriLength : Nat := 8192 + + /-- + Maximum length of header field name (default: 256 bytes) + -/ + maxHeaderNameLength : Nat := 256 + + /-- + Maximum length of header field value (default: 8192 bytes) + -/ + maxHeaderValueLength : Nat := 8192 + + /-- + Maximum number of spaces in delimiter sequences (default: 256) + -/ + maxSpaceSequence : Nat := 256 + + /-- + Maximum length of chunk extension name (default: 256 bytes) + -/ + maxChunkExtNameLength : Nat := 256 + + /-- + Maximum length of chunk extension value (default: 256 bytes) + -/ + maxChunkExtValueLength : Nat := 256 + + /-- + Maximum length of reason phrase (default: 512 bytes) + -/ + maxReasonPhraseLength : Nat := 512 + + /-- + Maximum number of trailer headers (default: 100) + -/ + maxTrailerHeaders : Nat := 100 + +end Std.Http.Protocol.H1 diff --git a/src/Std/Internal/Http/Protocol/H1/Error.lean b/src/Std/Internal/Http/Protocol/H1/Error.lean new file mode 100644 index 000000000000..e02e7ee36b11 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Error.lean @@ -0,0 +1,98 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Time +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal +public import Std.Internal.Http.Protocol.H1.Parser +public import Std.Internal.Http.Protocol.H1.Config +public import Std.Internal.Http.Protocol.H1.Message + +public section + +/-! 
+# HTTP/1.1 Errors + +This module defines the error types for HTTP/1.1 protocol processing, +including parsing errors, timeout errors, and connection errors. +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +/-- +Specific HTTP processing errors with detailed information. +-/ +inductive Error + /-- + Malformed request line or status line. + -/ + | invalidStatusLine + + /-- + Invalid or malformed header. + -/ + | invalidHeader + + /-- + Request timeout occurred. + -/ + | timeout + + /-- + Request entity too large. + -/ + | entityTooLarge + + /-- + Unsupported HTTP method. + -/ + | unsupportedMethod + + /-- + Unsupported HTTP version. + -/ + | unsupportedVersion + + /-- + Invalid chunk encoding. + -/ + | invalidChunk + + /-- + Connection Closed + -/ + | connectionClosed + + /-- + Bad request/response + -/ + | badMessage + + /-- + Generic error with message. + -/ + | other (message : String) +deriving Repr, BEq + +instance : ToString Error where + toString + | .invalidStatusLine => "Invalid status line" + | .invalidHeader => "Invalid header" + | .timeout => "Timeout" + | .entityTooLarge => "Entity too large" + | .unsupportedMethod => "Unsupported method" + | .unsupportedVersion => "Unsupported version" + | .invalidChunk => "Invalid chunk" + | .connectionClosed => "Connection closed" + | .badMessage => "Bad message" + | .other msg => s!"Other error: {msg}" + +instance : Repr ByteSlice where + reprPrec x := reprPrec x.toByteArray.data diff --git a/src/Std/Internal/Http/Protocol/H1/Event.lean b/src/Std/Internal/Http/Protocol/H1/Event.lean new file mode 100644 index 000000000000..11d2e3895366 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Event.lean @@ -0,0 +1,73 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. 
+Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Time +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal +public import Std.Internal.Http.Protocol.H1.Parser +public import Std.Internal.Http.Protocol.H1.Config +public import Std.Internal.Http.Protocol.H1.Message +public import Std.Internal.Http.Protocol.H1.Error + +public section + +/-! +# HTTP/1.1 Events + +This module defines the events that can occur during HTTP/1.1 message processing, +including header completion, data arrival, and error conditions. +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +/-- +Events emitted during HTTP message processing. +-/ +inductive Event (dir : Direction) + /-- + Indicates that all headers have been successfully parsed. + -/ + | endHeaders (head : Message.Head dir) + + /-- + Carries a chunk of message body data. + -/ + | gotData (final : Bool) (ext : Array (String × Option String)) (data : ByteSlice) + + /-- + Signals that additional input data is required to continue processing. + -/ + | needMoreData (size : Option Nat) + + /-- + Indicates a failure during parsing or processing. + -/ + | failed (err : Error) + + /-- + Requests that the connection be closed. + -/ + | close + + /-- + Indicates that a response is required. + -/ + | needAnswer + + /-- + Indicates that a message body is required. + -/ + | needBody + + /-- + Indicates readiness to process the next message. + -/ + | next +deriving Inhabited, Repr diff --git a/src/Std/Internal/Http/Protocol/H1/Message.lean b/src/Std/Internal/Http/Protocol/H1/Message.lean new file mode 100644 index 000000000000..12ec1390090f --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Message.lean @@ -0,0 +1,130 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Http.Data + +public section + +/-! 
+# Message + +This module provides types and operations for HTTP/1.1 messages, centered around the `Direction` +type which models the server's role in message exchange: `Direction.receiving` for parsing incoming +requests from clients, and `Direction.sending` for generating outgoing responses to clients. +The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or +`Response.Head` accordingly, enabling generic code that works uniformly across both phases +while exposing common operations such as headers, version, and `shouldKeepAlive` +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +/-- +Direction of message flow from the server's perspective. +-/ +inductive Direction + /-- + Receiving and parsing incoming requests from clients. + -/ + | receiving + + /-- + Generating and sending outgoing responses to clients. + -/ + | sending +deriving BEq + +/-- +Inverts the direction of the requests. +-/ +@[expose] +abbrev Direction.swap : Direction → Direction + | .receiving => .sending + | .sending => .receiving + +/-- +Gets the message head type based on direction. +-/ +@[expose] +def Message.Head : Direction → Type + | .receiving => Request.Head + | .sending => Response.Head + +/-- +Gets the headers of a `Message`. +-/ +def Message.Head.headers (m : Message.Head dir) : Headers := + match dir with + | .receiving => Request.Head.headers m + | .sending => Response.Head.headers m + +/-- +Gets the version of a `Message`. +-/ +def Message.Head.version (m : Message.Head dir) : Version := + match dir with + | .receiving => Request.Head.version m + | .sending => Response.Head.version m + +/-- +Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag. +-/ +def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length := + match message.headers.getAll? .contentLength, message.headers.getAll? 
.transferEncoding with + | some #[cl], none => + .fixed <$> (Header.ContentLength.parse cl).map (·.length) + | none, some #[te] => + (if · then some .chunked else none) =<< (Header.TransferEncoding.parse te).map (·.isChunked) + | none, none => + if allowEOFBody then some (.fixed 0) else none + | _, _ => none + +/-- +Checks whether the message indicates that the connection should be kept alive. +-/ +@[inline] +def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool := + ¬message.headers.hasEntry .connection (.new "close") + ∧ message.version = .v11 + +instance : Repr (Message.Head dir) := + match dir with + | .receiving => inferInstanceAs (Repr Request.Head) + | .sending => inferInstanceAs (Repr Response.Head) + +instance : ToString (Message.Head dir) := + match dir with + | .receiving => inferInstanceAs (ToString Request.Head) + | .sending => inferInstanceAs (ToString Response.Head) + +instance : EmptyCollection (Message.Head dir) where + emptyCollection := + match dir with + | .receiving => {} + | .sending => {} + +private def isChunked (message : Message.Head dir) : Option Bool := + let headers := message.headers + + if let some res := headers.get? .transferEncoding then + let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) + if encodings.isEmpty then + none + else + let chunkedCount := encodings.filter (· == "chunked") |>.size + let lastIsChunked := encodings.back? == some "chunked" + + if chunkedCount > 1 then + none + else if chunkedCount = 1 ∧ ¬lastIsChunked then + none + else + some lastIsChunked + else + some false diff --git a/src/Std/Internal/Http/Protocol/H1/Parser.lean b/src/Std/Internal/Http/Protocol/H1/Parser.lean new file mode 100644 index 000000000000..bde1598488dc --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Parser.lean @@ -0,0 +1,314 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. 
+Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Parsec +public import Std.Internal.Http.Data +public import Std.Internal.Parsec.ByteArray +public import Std.Internal.Http.Protocol.H1.Config + +/-! +This module defines a parser for HTTP/1.1 requests. The reference used is https://httpwg.org/specs/rfc9112.html. +-/ + +namespace Std.Http.Protocol.H1 + +open Std Internal Parsec ByteArray Internal + +set_option linter.all true + +@[inline] +def isDigit (c : UInt8) : Bool := + c ≥ '0'.toUInt8 ∧ c ≤ '9'.toUInt8 + +@[inline] +def isAlpha (c : UInt8) : Bool := + (c ≥ 'a'.toUInt8 ∧ c ≤ 'z'.toUInt8) ∨ (c ≥ 'A'.toUInt8 ∧ c ≤ 'Z'.toUInt8) + +@[inline] +def isVChar (c : UInt8) : Bool := + c ≥ 0x21 ∧ c ≤ 0x7E + +def isTokenCharacter (c : UInt8) : Bool := + isDigit c ∨ isAlpha c ∨ c == '!'.toUInt8 ∨ c == '#'.toUInt8 ∨ c == '$'.toUInt8 ∨ c == '%'.toUInt8 ∨ + c == '&'.toUInt8 ∨ c == '\''.toUInt8 ∨ c == '*'.toUInt8 ∨ c == '+'.toUInt8 ∨ c == '-'.toUInt8 ∨ + c == '.'.toUInt8 ∨ c == '^'.toUInt8 ∨ c == '_'.toUInt8 ∨ c == '`'.toUInt8 ∨ c == '|'.toUInt8 ∨ + c == '~'.toUInt8 + +@[inline] +def isObsChar (c : UInt8) : Bool := + c ≥ 0x80 ∧ c ≤ 0xFF + +@[inline] +def isFieldVChar (c : UInt8) : Bool := + isVChar c ∨ isObsChar c ∨ c = ' '.toUInt8 ∨ c = '\t'.toUInt8 + +-- HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text +@[inline] +def isQdText (c : UInt8) : Bool := + c == '\t'.toUInt8 ∨ + c == ' '.toUInt8 ∨ + c == '!'.toUInt8 ∨ + (c ≥ '#'.toUInt8 ∧ c ≤ '['.toUInt8) ∨ + (c ≥ ']'.toUInt8 ∧ c ≤ '~'.toUInt8) ∨ + isObsChar c + +-- Parser blocks + +def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do + let items ← many (attempt <| parser.bind (fun item => match item with + | some x => return x + | none => fail "end of items")) + if items.size > maxCount then + fail s!"Too many items: {items.size} > {maxCount}" + return items + +def opt (x : Option α) : Parser α := + if let some res := x then + return res + else + fail "expected 
value but got none" + +@[inline] +def token (limit : Nat) : Parser ByteSlice := + takeWhileUpTo1 isTokenCharacter limit + +@[inline] +def crlf : Parser Unit := do + discard <| optional (skipByte '\r'.toUInt8) + skipByte '\n'.toUInt8 + +@[inline] +def rsp (limits : H1.Config) : Parser Unit := + discard <| takeWhileUpTo1 (· == ' '.toUInt8) limits.maxSpaceSequence + +@[inline] +def osp (limits : H1.Config) : Parser Unit := + discard <| takeWhileUpTo (· == ' '.toUInt8) limits.maxSpaceSequence + +@[inline] +def uint8 : Parser UInt8 := do + let d ← digit + return d.toUInt8 + +def hexDigit : Parser UInt8 := do + let b ← any + if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8 + else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10 + else if b ≥ 'a'.toUInt8 && b ≤ 'f'.toUInt8 then return b - 'a'.toUInt8 + 10 + else fail s!"Invalid hex digit {Char.ofUInt8 b |>.quote}" + +@[inline] +def hex : Parser Nat := do + let hexDigits ← many1 (attempt hexDigit) + return (hexDigits.foldl (fun acc cur => acc * 16 + cur.toNat) 0) + +-- Actual parsers + +-- HTTP-version = HTTP-name "/" DIGIT "." DIGIT +-- HTTP-name = %s"HTTP" +def parseHttpVersion : Parser Version := do + skipBytes "HTTP/".toUTF8 + let major ← uint8 + skipByte '.'.toUInt8 + let minor ← uint8 + opt <| Version.ofNumber? 
(major - 48 |>.toNat) (minor - 48 |>.toNat) + +-- method = token +def parseMethod : Parser Method := + (skipBytes "GET".toUTF8 <&> fun _ => Method.get) + <|> (skipBytes "HEAD".toUTF8 <&> fun _ => Method.head) + <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post) + <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put) + <|> (skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete) + <|> (skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect) + <|> (skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options) + <|> (skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace) + <|> (skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch) + +def parseURI (limits : H1.Config) : Parser ByteArray := do + let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength + return uri.toByteArray + +/-- +Parses a request line + +request-line = method SP request-target SP HTTP-version +-/ +public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do + let method ← parseMethod <* rsp limits + let uri ← parseURI limits <* rsp limits + + let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run uri with + | .ok res => pure res + | .error res => fail res + + let version ← parseHttpVersion <* crlf + return ⟨method, version, uri, .empty⟩ + +-- field-line = field-name ":" OWS field-value OWS +def parseFieldLine (limits : H1.Config) : Parser (String × String) := do + let name ← token limits.maxHeaderNameLength + let value ← skipByte ':'.toUInt8 *> osp limits *> takeWhileUpTo1 isFieldVChar limits.maxHeaderValueLength <* osp limits + + let name ← opt <| String.fromUTF8? name.toByteArray + let value ← opt <| String.fromUTF8? value.toByteArray + + return (name, value) + +/-- +Parses a single header. + +field-line CRLF / CRLF +-/ +public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do + let next ← peek? 
+ if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then + crlf + pure none + else + some <$> (parseFieldLine limits <* crlf) + +-- quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) +def parseQuotedPair : Parser UInt8 := do + skipByte '\\'.toUInt8 + let b ← any + + if b == '\t'.toUInt8 ∨ b == ' '.toUInt8 ∨ isVChar b ∨ isObsChar b then + return b + else + fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}" + +-- quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE +partial def parseQuotedString : Parser String := do + skipByte '"'.toUInt8 + + let rec loop (buf : ByteArray) : Parser ByteArray := do + let b ← any + + if b == '"'.toUInt8 then + return buf + else if b == '\\'.toUInt8 then + let next ← any + if next == '\t'.toUInt8 ∨ next == ' '.toUInt8 ∨ isVChar next ∨ isObsChar next + then loop (buf.push next) + else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}" + else if isQdText b then + loop (buf.push b) + else + fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}" + + opt <| String.fromUTF8? (← loop .empty) + +-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] ) +def parseChunkExt (limits : H1.Config) : Parser (String × Option String) := do + osp limits *> skipByte ';'.toUInt8 *> osp limits + let name ← (opt =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtNameLength) <* osp limits + + if (← peekWhen? (· == '='.toUInt8)) |>.isSome then + osp limits *> skipByte '='.toUInt8 *> osp limits + let value ← osp limits *> (parseQuotedString <|> opt =<< (String.fromUTF8? 
<$> ByteSlice.toByteArray <$> token limits.maxChunkExtValueLength)) + return (name, some value) + + return (name, none) + +/-- +This function parses the size and extension of a chunk +-/ +public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (String × Option String)) := do + let size ← hex + let ext ← many (parseChunkExt limits) + crlf + return (size, ext) + +/-- +Result of parsing partial or complete information. +-/ +public inductive TakeResult + | complete (data : ByteSlice) + | incomplete (data : ByteSlice) (remaining : Nat) + +/-- +This function parses a single chunk in chunked transfer encoding +-/ +public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (String × Option String) × ByteSlice)) := do + let (size, ext) ← parseChunkSize limits + if size == 0 then + return none + else + let data ← take size + return some ⟨size, ext, data⟩ + +/-- +Parses a fixed size data that can be incomplete. +-/ +public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it => + if it.remainingBytes = 0 then + .error it .eof + else if it.remainingBytes < size then + .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes)) + else + .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)])) + +/-- +Parses a fixed size data that can be incomplete. 
+-/ +public def parseChunkedSizedData (size : Nat) : Parser TakeResult := do + match ← parseFixedSizeData size with + | .complete data => crlf *> return .complete data + | .incomplete data res => return .incomplete data res + +/-- +This function parses a trailer header (used after chunked body) +-/ +def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := parseSingleHeader limits + +/-- +This function parses trailer headers after chunked body +-/ +public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do + let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders + crlf + return trailers + +/-- +Parses HTTP status code (3 digits) +-/ +def parseStatusCode : Parser Status := do + let d1 ← digit + let d2 ← digit + let d3 ← digit + let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48) + + return Status.ofCode code.toUInt16 + +/-- +Parses reason phrase (text after status code) +-/ +def parseReasonPhrase (limits : H1.Config) : Parser String := do + let bytes ← takeWhileUpTo (fun c => c != '\r'.toUInt8) limits.maxReasonPhraseLength + opt <| String.fromUTF8? bytes.toByteArray + +/-- +Parses a status line + +status-line = HTTP-version SP status-code SP [ reason-phrase ] +-/ +public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do + let version ← parseHttpVersion <* rsp limits + let status ← parseStatusCode <* rsp limits + discard <| parseReasonPhrase limits <* crlf + return ⟨status, version, .empty⟩ + +/-- +This function parses the body of the last chunk. 
+-/ +public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do + discard <| manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders + crlf + +end Std.Http.Protocol.H1 diff --git a/src/Std/Internal/Http/Protocol/H1/Reader.lean b/src/Std/Internal/Http/Protocol/H1/Reader.lean new file mode 100644 index 000000000000..ce2d7622a6f8 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Reader.lean @@ -0,0 +1,269 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Time +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal +public import Std.Internal.Http.Protocol.H1.Parser +public import Std.Internal.Http.Protocol.H1.Config +public import Std.Internal.Http.Protocol.H1.Message +public import Std.Internal.Http.Protocol.H1.Error + +public section + +/-! +# HTTP/1.1 Reader + +This module defines the reader state machine for parsing incoming HTTP/1.1 messages. +It tracks the parsing state including start line, headers, and body handling for +both fixed-length and chunked transfer encodings. +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +/-- +The state of the `Reader` state machine. +-/ +inductive Reader.State (dir : Direction) : Type + /-- + Initial state waiting for HTTP start line. + -/ + | needStartLine : State dir + + /-- + State waiting for HTTP headers, tracking number of headers parsed. + -/ + | needHeader : Nat → State dir + + /-- + State waiting for chunk size in chunked transfer encoding. + -/ + | needChunkedSize : State dir + + /-- + State waiting for chunk body data of specified size. + -/ + | needChunkedBody : Array (String × Option String) → Nat → State dir + + /-- + State waiting for fixed-length body data of specified size. 
+ -/ + | needFixedBody : Nat → State dir + + /-- + State that it completed a single request or response and can go to the next one + -/ + | complete + + /-- + State that it has completed and cannot process more data. + -/ + | closed + + /-- + The input is malformed. + -/ + | failed (error : Error) : State dir +deriving Inhabited, Repr, BEq + +/-- +Manages the reading state of the HTTP parsing and processing machine. +-/ +structure Reader (dir : Direction) where + /-- + The current state of the machine. + -/ + state : Reader.State dir := .needStartLine + + /-- + The input byte array. + -/ + input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter + + /-- + The incoming message head. + -/ + messageHead : Message.Head dir := {} + + /-- + Count of messages that this connection already parsed + -/ + messageCount : Nat := 0 + + /-- + Flag that says that it cannot receive more input (the socket disconnected). + -/ + noMoreInput : Bool := false + +namespace Reader + +/-- +Checks if the reader is in a closed state and cannot process more messages. +-/ +@[inline] +def isClosed (reader : Reader dir) : Bool := + match reader.state with + | .closed => true + | _ => false + +/-- +Checks if the reader has completed parsing the current message. +-/ +@[inline] +def isComplete (reader : Reader dir) : Bool := + match reader.state with + | .complete => true + | _ => false + +/-- +Checks if the reader has encountered an error. +-/ +@[inline] +def hasFailed (reader : Reader dir) : Bool := + match reader.state with + | .failed _ => true + | _ => false + +/-- +Feeds new data into the reader's input buffer. +If the current input is exhausted, replaces it; otherwise appends. +-/ +@[inline] +def feed (data : ByteArray) (reader : Reader dir) : Reader dir := + { reader with input := + if reader.input.atEnd + then data.iter + else { reader.input with array := reader.input.array ++ data } } + +/-- +Replaces the reader's input iterator with a new one. 
+-/ +@[inline] +def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir := + { reader with input } + +/-- +Updates the message head being constructed. +-/ +@[inline] +def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir := + { reader with messageHead } + +/-- +Adds a header to the current message head. +-/ +@[inline] +def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir := + match dir with + | .sending => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } } + | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } } + +/-- +Closes the reader, transitioning to the closed state. +-/ +@[inline] +def close (reader : Reader dir) : Reader dir := + { reader with state := .closed, noMoreInput := true } + +/-- +Marks the current message as complete and prepares for the next message. +-/ +@[inline] +def markComplete (reader : Reader dir) : Reader dir := + { reader with + state := .complete + messageCount := reader.messageCount + 1 } + +/-- +Transitions the reader to a failed state with the given error. +-/ +@[inline] +def fail (error : Error) (reader : Reader dir) : Reader dir := + { reader with state := .failed error } + +/-- +Resets the reader to parse a new message on the same connection. +-/ +@[inline] +def reset (reader : Reader dir) : Reader dir := + { reader with + state := .needStartLine + messageHead := {} } + +/-- +Checks if more input is needed to continue parsing. +-/ +@[inline] +def needsMoreInput (reader : Reader dir) : Bool := + reader.input.atEnd && !reader.noMoreInput && + match reader.state with + | .complete | .closed | .failed _ => false + | _ => true + +/-- +Returns the current parse error if the reader has failed. 
+-/ +@[inline] +def getError (reader : Reader dir) : Option Error := + match reader.state with + | .failed err => some err + | _ => none + +/-- +Gets the number of bytes remaining in the input buffer. +-/ +@[inline] +def remainingBytes (reader : Reader dir) : Nat := + reader.input.array.size - reader.input.pos + +/-- +Advances the input iterator by n bytes. +-/ +@[inline] +def advance (n : Nat) (reader : Reader dir) : Reader dir := + { reader with input := reader.input.forward n } + +/-- +Transitions to the state for reading headers. +-/ +@[inline] +def startHeaders (reader : Reader dir) : Reader dir := + { reader with state := .needHeader 0 } + +/-- +Transitions to the state for reading a fixed-length body. +-/ +@[inline] +def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir := + { reader with state := .needFixedBody size } + +/-- +Transitions to the state for reading chunked transfer encoding. +-/ +@[inline] +def startChunkedBody (reader : Reader dir) : Reader dir := + { reader with state := .needChunkedSize } + +/-- +Marks that no more input will be provided (connection closed). +-/ +@[inline] +def markNoMoreInput (reader : Reader dir) : Reader dir := + { reader with noMoreInput := true } + +/-- +Checks if the connection should be kept alive for the next message. +-/ +def shouldKeepAlive (reader : Reader dir) : Bool := + match reader.messageHead.headers.get? .connection with + | some val => let s := val.value.toLower; s == "keep-alive" + | none => true + +end Reader diff --git a/src/Std/Internal/Http/Protocol/H1/Writer.lean b/src/Std/Internal/Http/Protocol/H1/Writer.lean new file mode 100644 index 000000000000..55f125faf6f7 --- /dev/null +++ b/src/Std/Internal/Http/Protocol/H1/Writer.lean @@ -0,0 +1,265 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. 
+Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Time +public import Std.Internal.Http.Data +public import Std.Internal.Http.Internal +public import Std.Internal.Http.Protocol.H1.Parser +public import Std.Internal.Http.Protocol.H1.Config +public import Std.Internal.Http.Protocol.H1.Message +public import Std.Internal.Http.Protocol.H1.Error + +public section + +/-! +# HTTP/1.1 Writer + +This module defines the writer state machine for generating outgoing HTTP/1.1 messages. +It handles encoding headers and body content for both fixed-length and chunked +transfer encodings. +-/ + +namespace Std.Http.Protocol.H1 + +set_option linter.all true + +open Internal + +/-- +The state of the `Writer` state machine. +-/ +inductive Writer.State + /-- + It starts writing only when part of the request is received. + -/ + | pending + + /-- + Ready to write the message + -/ + | waitingHeaders + + /-- + This is the state that we wait for a forced flush. This happens and causes the writer to + start actually writing to the outputData + -/ + | waitingForFlush + + /-- + Writing the headers. + -/ + | writingHeaders + + /-- + Writing a fixed size body output. + -/ + | writingBody (mode : Body.Length) + + /-- + It will flush all the remaining data and cause it to shutdown the machine. + -/ + | shuttingDown + + /-- + State that it completed a single request and can go to the next one + -/ + | complete + + /-- + State that it has completed and cannot process more data. + -/ + | closed +deriving Inhabited, Repr, BEq + +/-- +Manages the writing state of the HTTP generating and writing machine. +-/ +structure Writer (dir : Direction) where + /-- + This is all the data that the user is sending that is being accumulated. + -/ + userData : Array Chunk := .empty + + /-- + All the data that is produced by the writer. + -/ + outputData : ChunkedBuffer := .empty + + /-- + The state of the writer machine. 
+ -/ + state : Writer.State := .pending + + /-- + When the user specifies the exact size upfront, we can use Content-Length + instead of chunked transfer encoding for streaming + -/ + knownSize : Option Body.Length := none + + /-- + The outgoing message that will be written to the output + -/ + messageHead : Message.Head dir.swap := {} + + /-- + The user sent the message + -/ + sentMessage : Bool := false + + /-- + This flags that the body stream is closed so if we start to write the body we know exactly the size. + -/ + userClosedBody : Bool := false + +namespace Writer + +/-- +Checks if the writer is ready to send data to the output. +-/ +@[inline] +def isReadyToSend {dir} (writer : Writer dir) : Bool := + match writer.state with + | .closed | .complete => true + | _ => writer.userClosedBody + +/-- +Checks if the writer is closed (cannot process more data) +-/ +@[inline] +def isClosed (writer : Writer dir) : Bool := + match writer.state with + | .closed => true + | _ => false + +/-- +Checks if the writer has completed processing a request +-/ +@[inline] +def isComplete (writer : Writer dir) : Bool := + match writer.state with + | .complete => true + | _ => false + +/-- +Checks if the writer can accept more data from the user +-/ +@[inline] +def canAcceptData (writer : Writer dir) : Bool := + match writer.state with + | .waitingHeaders => true + | .waitingForFlush => true + | .writingBody _ => !writer.userClosedBody + | _ => false + +/-- +Marks the body as closed, indicating no more user data will be added +-/ +@[inline] +def closeBody (writer : Writer dir) : Writer dir := + { writer with userClosedBody := true } + +/-- +Determines the transfer encoding mode based on explicit setting, body closure state, or defaults to chunked +-/ +def determineTransferMode (writer : Writer dir) : Body.Length := + if let some mode := writer.knownSize then + mode + else if writer.userClosedBody then + let size := writer.userData.foldl (fun x y => x + y.size) 0 + .fixed size + else 
+ .chunked + +/-- +Adds user data chunks to the writer's buffer if the writer can accept data +-/ +@[inline] +def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir := + if writer.canAcceptData then + { writer with userData := writer.userData ++ data } + else + writer + +/-- +Writes accumulated user data to output using fixed-size encoding +-/ +def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat := + if writer.userData.size = 0 then + (writer, limitSize) + else + let data := writer.userData.map Chunk.data + let (chunks, totalSize) := data.foldl (fun (acc, size) ba => + if size >= limitSize then + (acc, size) + else + let remaining := limitSize - size + let takeSize := min ba.size remaining + let chunk := ba.extract 0 takeSize + (acc.push chunk, size + takeSize) + ) (#[], 0) + let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks) + let remaining := limitSize - totalSize + ({ writer with userData := #[], outputData }, remaining) + +/-- +Writes accumulated user data to output using chunked transfer encoding +-/ +def writeChunkedBody (writer : Writer dir) : Writer dir := + if writer.userData.size = 0 then + writer + else + let data := writer.userData + { writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData } + +/-- +Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state +-/ +def writeFinalChunk (writer : Writer dir) : Writer dir := + let writer := writer.writeChunkedBody + { writer with + outputData := writer.outputData.write "0\r\n\r\n".toUTF8 + state := .complete + } + +/-- +Extracts all accumulated output data and returns it with a cleared output buffer +-/ +@[inline] +def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) := + let output := writer.outputData.toByteArray + some ({ writer with outputData := ChunkedBuffer.empty }, output) + +/-- +Updates the writer's state machine to a new state +-/ +@[inline] +def setState 
(state : Writer.State) (writer : Writer dir) : Writer dir := + { writer with state } + +/-- +Writes the message headers to the output buffer +-/ +private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir := + { writer with outputData := writer.outputData.push (toString messageHead).toUTF8 } + +/-- +Checks if the connection should be kept alive based on the Connection header +-/ +def shouldKeepAlive (writer : Writer dir) : Bool := + writer.messageHead.headers.get? .connection + |>.map (fun v => v.value.toLower != "close") + |>.getD true + +/-- +Closes the writer, transitioning to the closed state. +-/ +@[inline] +def close (writer : Writer dir) : Writer dir := + { writer with state := .closed } + +end Writer From 12a7603c7788bc6ca2a5e42d4d2ecc2882ae68e8 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sat, 24 Jan 2026 21:04:19 -0300 Subject: [PATCH 06/44] fix: orphan module --- src/Std/Internal/Http.lean | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Std/Internal/Http.lean b/src/Std/Internal/Http.lean index 1dc3c0b6ad35..9c7ee1f6899b 100644 --- a/src/Std/Internal/Http.lean +++ b/src/Std/Internal/Http.lean @@ -7,3 +7,4 @@ module prelude public import Std.Internal.Http.Data +public import Std.Internal.Http.Protocol.H1 From 7ce8cbc01cc9b51c652f3ba2c12ccd96c4dbac54 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sun, 25 Jan 2026 01:54:33 -0300 Subject: [PATCH 07/44] feat: remove toString instances --- src/Std/Internal/Http/Protocol/H1.lean | 42 ++++++------- .../Internal/Http/Protocol/H1/Message.lean | 61 +++++++++---------- src/Std/Internal/Http/Protocol/H1/Writer.lean | 2 +- 3 files changed, 52 insertions(+), 53 deletions(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index 0fd847955aa5..1cc79652c42d 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -180,7 +180,7 @@ private def checkMessageHead (message : 
Message.Head dir) : Option Body.Length : if message.method == .head ∨ message.method == .connect then return .fixed 0 - message.getSize false + message.getSize true -- State Checks @@ -319,10 +319,10 @@ def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : M let size := Writer.determineTransferMode machine.writer let headers := - if messageHead.headers.contains .host then + if messageHead.headers.contains Header.Name.host then messageHead.headers else if let some host := machine.host then - messageHead.headers.insert .host host + messageHead.headers.insert Header.Name.host host else messageHead.headers @@ -330,36 +330,36 @@ def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : M let headers := let identityOpt := machine.config.identityHeader match dir, identityOpt with - | .receiving, some server => headers.insert .server server - | .sending, some userAgent => headers.insert .userAgent userAgent + | .receiving, some server => headers.insert Header.Name.server server + | .sending, some userAgent => headers.insert Header.Name.userAgent userAgent | _, none => headers -- Add Connection: close if needed let headers := - if !machine.keepAlive ∧ !headers.hasEntry .connection .close then - headers.insert .connection .close + if !machine.keepAlive ∧ !headers.hasEntry Header.Name.connection Header.Value.close then + headers.insert Header.Name.connection Header.Value.close else headers -- Add Content-Length or Transfer-Encoding if needed let headers := - if !(headers.contains .contentLength ∨ headers.contains .transferEncoding) then + if !(headers.contains Header.Name.contentLength ∨ headers.contains Header.Name.transferEncoding) then match size with - | .fixed n => headers.insert .contentLength (.ofString! <| toString n) - | .chunked => headers.insert .transferEncoding .chunked + | .fixed n => headers.insert Header.Name.contentLength (.ofString! 
<| toString n) + | .chunked => headers.insert Header.Name.transferEncoding Header.Value.chunked else headers let state := Writer.State.writingBody size - let messageHead := - match dir, messageHead with - | .receiving, messageHead => toString { messageHead with headers } - | .sending, messageHead => toString { messageHead with headers } - machine.modifyWriter (fun writer => { writer with - outputData := writer.outputData.append messageHead.toUTF8, + + outputData := + match dir, messageHead with + | .receiving, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers } + | .sending, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers }, + state }) @@ -404,7 +404,7 @@ def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir let machine := if machine.writer.knownSize.isNone then - match message.getSize false with + match extractBodyLengthFromHeaders message.headers with | some size => machine.setKnownSize size | none => machine else @@ -414,7 +414,7 @@ def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir else machine -/-- Send data to the socket. -/ +/--Send data to the socket. -/ @[inline] def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir := if data.isEmpty then @@ -422,18 +422,18 @@ def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir := else machine.modifyWriter (fun writer => { writer with userData := writer.userData ++ data }) -/-- Get all the events of the machine. -/ +/--Get all the events of the machine. -/ @[inline] def takeEvents (machine : Machine dir) : Machine dir × Array (Event dir) := ({ machine with events := #[] }, machine.events) -/-- Take all the accumulated output to send to the socket. -/ +/--Take all the accumulated output to send to the socket. 
-/ @[inline] def takeOutput (machine : Machine dir) : Machine dir × ChunkedBuffer := let output := machine.writer.outputData ({ machine with writer := { machine.writer with outputData := .empty } }, output) -/-- Process the writer part of the machine. -/ +/--Process the writer part of the machine. -/ partial def processWrite (machine : Machine dir) : Machine dir := match machine.writer.state with | .pending => diff --git a/src/Std/Internal/Http/Protocol/H1/Message.lean b/src/Std/Internal/Http/Protocol/H1/Message.lean index 12ec1390090f..c4842a704b39 100644 --- a/src/Std/Internal/Http/Protocol/H1/Message.lean +++ b/src/Std/Internal/Http/Protocol/H1/Message.lean @@ -72,18 +72,37 @@ def Message.Head.version (m : Message.Head dir) : Version := | .receiving => Request.Head.version m | .sending => Response.Head.version m +private def isChunked (message : Message.Head dir) : Option Bool := + let headers := message.headers + + if let some res := headers.get? .transferEncoding then + let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) + if encodings.isEmpty then + none + else + let chunkedCount := encodings.filter (· == "chunked") |>.size + let lastIsChunked := encodings.back? == some "chunked" + + if chunkedCount > 1 then + none + else if chunkedCount = 1 ∧ ¬lastIsChunked then + none + else + some lastIsChunked + else + some false + /-- Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag. -/ def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length := - match message.headers.getAll? .contentLength, message.headers.getAll? 
.transferEncoding with - | some #[cl], none => - .fixed <$> (Header.ContentLength.parse cl).map (·.length) - | none, some #[te] => - (if · then some .chunked else none) =<< (Header.TransferEncoding.parse te).map (·.isChunked) - | none, none => - if allowEOFBody then some (.fixed 0) else none - | _, _ => none + match (message.headers.getAll? .contentLength, isChunked message) with + | (some #[cl], some false) => .fixed <$> cl.value.toNat? + | (none, some false) => if allowEOFBody then some (.fixed 0) else none + | (none, some true) => some .chunked + | (some _, some _) => none -- To avoid request smuggling with multiple content-length headers. + | (_, none) => none -- Error validating the chunked encoding + /-- Checks whether the message indicates that the connection should be kept alive. @@ -98,33 +117,13 @@ instance : Repr (Message.Head dir) := | .receiving => inferInstanceAs (Repr Request.Head) | .sending => inferInstanceAs (Repr Response.Head) -instance : ToString (Message.Head dir) := +instance : Internal.Encode .v11 (Message.Head dir) := match dir with - | .receiving => inferInstanceAs (ToString Request.Head) - | .sending => inferInstanceAs (ToString Response.Head) + | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head) + | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head) instance : EmptyCollection (Message.Head dir) where emptyCollection := match dir with | .receiving => {} | .sending => {} - -private def isChunked (message : Message.Head dir) : Option Bool := - let headers := message.headers - - if let some res := headers.get? .transferEncoding then - let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) - if encodings.isEmpty then - none - else - let chunkedCount := encodings.filter (· == "chunked") |>.size - let lastIsChunked := encodings.back? 
== some "chunked" - - if chunkedCount > 1 then - none - else if chunkedCount = 1 ∧ ¬lastIsChunked then - none - else - some lastIsChunked - else - some false diff --git a/src/Std/Internal/Http/Protocol/H1/Writer.lean b/src/Std/Internal/Http/Protocol/H1/Writer.lean index 55f125faf6f7..afbc77922ee4 100644 --- a/src/Std/Internal/Http/Protocol/H1/Writer.lean +++ b/src/Std/Internal/Http/Protocol/H1/Writer.lean @@ -245,7 +245,7 @@ def setState (state : Writer.State) (writer : Writer dir) : Writer dir := Writes the message headers to the output buffer -/ private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir := - { writer with outputData := writer.outputData.push (toString messageHead).toUTF8 } + { writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead } /-- Checks if the connection should be kept alive based on the Connection header From 6ffd5ad2a4789fa695e58c58e7aae6d95d07c3f4 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Sun, 25 Jan 2026 03:56:16 -0300 Subject: [PATCH 08/44] fix: incremental parsing --- src/Std/Internal/Parsec/ByteArray.lean | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/Std/Internal/Parsec/ByteArray.lean b/src/Std/Internal/Parsec/ByteArray.lean index c92bab2fc0bc..1fa874484f0d 100644 --- a/src/Std/Internal/Parsec/ByteArray.lean +++ b/src/Std/Internal/Parsec/ByteArray.lean @@ -56,16 +56,26 @@ def skipByte (b : UInt8) : Parser Unit := /-- Skip a sequence of bytes equal to the given `ByteArray`. 
-/ -def skipBytes (arr : ByteArray) : Parser Unit := do - for b in arr do - skipByte b +def skipBytes (arr : ByteArray) : Parser Unit := fun it => + if it.remainingBytes < arr.size then + .error it .eof + else + let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator := + if h : idx < arr.size then + match skipByte arr[idx] it with + | .success it' _ => go (idx + 1) it' + | .error it' err => .error it' err + else + .success it () + go 0 it /-- Parse a string by matching its UTF-8 bytes, returns the string on success. -/ @[inline] def pstring (s : String) : Parser String := do - skipBytes s.toUTF8 + let utf8 := s.toUTF8 + skipBytes utf8 return s /-- From f13651979e327bbd1c60fd7d069e9d8cc3f65402 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Mon, 9 Feb 2026 19:31:41 -0300 Subject: [PATCH 09/44] fix: wireFormatSize --- src/Std/Internal/Http/Data/Body/ChunkStream.lean | 2 +- src/Std/Internal/Http/Data/Chunk.lean | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/Std/Internal/Http/Data/Body/ChunkStream.lean b/src/Std/Internal/Http/Data/Body/ChunkStream.lean index 2c31586fcde6..3a62c0d13bfb 100644 --- a/src/Std/Internal/Http/Data/Body/ChunkStream.lean +++ b/src/Std/Internal/Http/Data/Body/ChunkStream.lean @@ -125,7 +125,7 @@ def empty : Async ChunkStream := private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length := match knownSize with - | some (.fixed res) => some (Body.Length.fixed (res - chunk.size)) + | some (.fixed res) => some (Body.Length.fixed (res - chunk.wireFormatSize)) | _ => knownSize private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : diff --git a/src/Std/Internal/Http/Data/Chunk.lean b/src/Std/Internal/Http/Data/Chunk.lean index d94d9678550a..abc3f203b342 100644 --- a/src/Std/Internal/Http/Data/Chunk.lean +++ b/src/Std/Internal/Http/Data/Chunk.lean @@ -75,6 +75,16 @@ instance : Encode .v11 Chunk where 
let size := Nat.toDigits 16 chunkLen |>.toArray |>.map Char.toUInt8 |> ByteArray.mk buffer.append #[size, exts.toUTF8, "\r\n".toUTF8, chunk.data, "\r\n".toUTF8] +/-- +Returns the total wire format size of the chunk in bytes. This includes the hex-encoded data length +prefix, formatted extensions (`;name=value`), CRLF after the size line, the data itself, and the +trailing CRLF. +-/ +def wireFormatSize (chunk : Chunk) : Nat := + let hexSize := (Nat.toDigits 16 chunk.data.size).length + let extensionsSize := chunk.extensions.foldl (fun acc (name, value) => acc + name.length + (value.map (fun v => v.length + 1) |>.getD 0) + 1) 0 + hexSize + extensionsSize + 2 + chunk.data.size + 2 + end Chunk /-- From 294e9900ea29ad0aacf73ab5fc13c3561ec20734 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Mon, 9 Feb 2026 20:29:18 -0300 Subject: [PATCH 10/44] feat: unify all in stream --- src/Std/Internal/Http/Data/Body.lean | 101 +------ src/Std/Internal/Http/Data/Body/Basic.lean | 41 +++ src/Std/Internal/Http/Data/Body/Empty.lean | 121 -------- src/Std/Internal/Http/Data/Body/Full.lean | 279 ------------------ .../Body/{ChunkStream.lean => Stream.lean} | 244 ++++++++++++--- .../Internal/Http/Internal/ChunkedBuffer.lean | 12 +- tests/lean/run/async_http_body.lean | 259 ++++------------ 7 files changed, 314 insertions(+), 743 deletions(-) create mode 100644 src/Std/Internal/Http/Data/Body/Basic.lean delete mode 100644 src/Std/Internal/Http/Data/Body/Empty.lean delete mode 100644 src/Std/Internal/Http/Data/Body/Full.lean rename src/Std/Internal/Http/Data/Body/{ChunkStream.lean => Stream.lean} (50%) diff --git a/src/Std/Internal/Http/Data/Body.lean b/src/Std/Internal/Http/Data/Body.lean index 327356d61301..a5c889877bf1 100644 --- a/src/Std/Internal/Http/Data/Body.lean +++ b/src/Std/Internal/Http/Data/Body.lean @@ -8,103 +8,6 @@ module prelude public import Std.Internal.Async.ContextAsync public import Std.Internal.Http.Data.Headers +public import Std.Internal.Http.Data.Body.Basic 
public import Std.Internal.Http.Data.Body.Length -public import Std.Internal.Http.Data.Body.ChunkStream -public import Std.Internal.Http.Data.Body.Full -public import Std.Internal.Http.Data.Body.Empty - -public section - -/-! -# Body - -This module defines the `Body` typeclass, which provides a uniform interface for HTTP body types -including streaming and fully-buffered bodies. --/ - -namespace Std.Http - -set_option linter.all true - -open Std Internal IO Async - -/-- -Typeclass that provides a uniform interface for HTTP body types. Implementations include -streaming bodies (`ByteStream`, `ChunkStream`) and fully-buffered bodies (`Full`). --/ -class Body (α : Type) (β : outParam Type) where - /-- - Non-blocking receive. Returns `none` if the stream is closed or has ended, - `some` if data is available. - -/ - recv? : α → Async (Option β) - - /-- - Blocking receive. Blocks if no data is available yet. Returns `none` if the stream - is closed or has ended, `some` if data becomes available. If an amount is specified, - accumulates bytes up to that size before returning. - -/ - recv : α → Option UInt64 → Async (Option β) - - /-- - Send data to the body. May block if the buffer is full. - -/ - send : α → β → Async Unit - - /-- - Checks if the body is closed. - -/ - isClosed : α → Async Bool - - /-- - Returns the known size of the body if available. - -/ - size? : α → Async (Option Body.Length) - - /-- - Creates an empty body. - -/ - empty : Async α - - /-- - Creates a `Selector` for multiplexing receive operations. Resolves once data is available - and provides it, or returns `none` when the body is closed. - -/ - recvSelector : α → Selector (Option β) - - /-- - Closes the stream - -/ - close : α → Async Unit - -instance : Body Body.ChunkStream Chunk where - recv? := Body.ChunkStream.tryRecv - recv := Body.ChunkStream.recv - send := Body.ChunkStream.send - isClosed := Body.ChunkStream.isClosed - size? 
:= Body.ChunkStream.getKnownSize - empty := Body.ChunkStream.empty - recvSelector := Body.ChunkStream.recvSelector - close := Body.ChunkStream.close - -instance : Body Body.Full Chunk where - recv? full := do return (← Body.Full.recv? full).map Chunk.ofByteArray - recv full count := do return (← Body.Full.recv full count).map Chunk.ofByteArray - send full chunk := Body.Full.send full chunk.data - isClosed := Body.Full.isClosed - size? := Body.Full.size? - empty := Body.Full.empty - recvSelector := Body.Full.recvSelector - close := Body.Full.close - -instance : Body Body.Empty Chunk where - recv? empty := do return (← Body.Empty.recv? empty).map Chunk.ofByteArray - recv empty count := do return (← Body.Empty.recv empty count).map Chunk.ofByteArray - send empty chunk := Body.Empty.send empty chunk.data - isClosed := Body.Empty.isClosed - size? := Body.Empty.size? - empty := Body.Empty.new - recvSelector := Body.Empty.recvSelector - close := Body.Empty.close - -end Std.Http +public import Std.Internal.Http.Data.Body.Stream diff --git a/src/Std/Internal/Http/Data/Body/Basic.lean b/src/Std/Internal/Http/Data/Body/Basic.lean new file mode 100644 index 000000000000..6fd614a97009 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Basic.lean @@ -0,0 +1,41 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async.ContextAsync +public import Std.Internal.Http.Data.Headers +public import Std.Internal.Http.Data.Body.Length + +public section + +/-! +# Body + +This module defines shared types for HTTP body handling. +-/ + +namespace Std.Http.Body + +set_option linter.all true + +/-- +Typeclass for types that can be converted to a `ByteArray`. 
+-/ +class ToByteArray (α : Type) where + + /-- + Transforms into a `ByteArray` + -/ + toByteArray : α → ByteArray + +instance : ToByteArray ByteArray where + toByteArray := id + +instance : ToByteArray String where + toByteArray := String.toUTF8 + +end Std.Http.Body diff --git a/src/Std/Internal/Http/Data/Body/Empty.lean b/src/Std/Internal/Http/Data/Body/Empty.lean deleted file mode 100644 index e341f6f629eb..000000000000 --- a/src/Std/Internal/Http/Data/Body/Empty.lean +++ /dev/null @@ -1,121 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Internal.Async -public import Std.Internal.Http.Data.Chunk -public import Std.Internal.Http.Data.Request -public import Std.Internal.Http.Data.Response -public import Std.Internal.Http.Data.Body.Length - -public section - -/-! -# Empty - -An `Empty` represents an HTTP body with no content. This is useful for requests and responses -that do not carry a body, such as GET requests or 204 No Content responses. --/ - -namespace Std.Http.Body -open Std Internal IO Async - -set_option linter.all true - -/-- -An empty HTTP body type that contains no data. --/ -structure Empty where - private mk :: -deriving Inhabited, Nonempty - -namespace Empty - -/-- -The singleton empty body value. --/ -def val : Empty := Empty.mk - -/-- -Creates a new empty body. --/ -def new : Async Empty := - pure val - -/-- -Non-blocking receive. Always returns `none` since there is no data. --/ -def recv? (_ : Empty) : Async (Option ByteArray) := - pure none - -/-- -Blocking receive. Always returns `none` since there is no data. --/ -def recv (_ : Empty) (_ : Option UInt64) : Async (Option ByteArray) := - pure none - -/-- -Sending to an empty body is a no-op. --/ -def send (_ : Empty) (_ : ByteArray) : Async Unit := - pure () - -/-- -An empty body is always closed. 
--/ -def isClosed (_ : Empty) : Async Bool := - pure true - -/-- -Returns `none` since an empty body has no size. --/ -def size? (_ : Empty) : Async (Option Body.Length) := - pure (some (.fixed 0)) - -/-- -Closing an empty body is a no-op. --/ -def close (_ : Empty) : Async Unit := - pure () - -/-- -Creates a `Selector` that immediately resolves to `none`. --/ -def recvSelector (_ : Empty) : Selector (Option Chunk) where - tryFn := pure (some none) - registerFn waiter := do - let lose := return () - let win promise := promise.resolve (.ok none) - waiter.race lose win - unregisterFn := pure () - -instance : EmptyCollection Empty where - emptyCollection := val - -end Empty - -end Std.Http.Body - -namespace Std.Http.Request.Builder - -/-- -Builds a request with an empty body. --/ -def blank (builder : Builder) : Request Body.Empty := - { head := builder.head, body := Body.Empty.val } - -end Std.Http.Request.Builder - -namespace Std.Http.Response.Builder - -/-- -Builds a response with an empty body. --/ -def blank (builder : Builder) : Response Body.Empty := - { head := builder.head, body := Body.Empty.val } - -end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean deleted file mode 100644 index 278a75f50190..000000000000 --- a/src/Std/Internal/Http/Data/Body/Full.lean +++ /dev/null @@ -1,279 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Sync -public import Std.Internal.Async -public import Std.Internal.Http.Data.Chunk -public import Std.Internal.Http.Data.Request -public import Std.Internal.Http.Data.Response -public import Std.Internal.Http.Data.Body.Length -public import Init.Data.ByteArray - -public section - -/-! -# Full - -A `Full` represents a fully-buffered HTTP body that contains data which can be consumed exactly once. 
-It wraps a `ByteArray` in a `Mutex`-protected `Option`, tracking whether the data has already been -consumed. --/ - -namespace Std.Http.Body -open Std Internal IO Async - -set_option linter.all true - -/-- -Typeclass for types that can be converted to a `ByteArray`. --/ -class ToByteArray (α : Type) where - - /-- - Transforms into a `ByteArray` - -/ - toByteArray : α → ByteArray - -instance : ToByteArray ByteArray where - toByteArray := id - -instance : ToByteArray String where - toByteArray := String.toUTF8 - -open Internal.IO.Async in - -private structure Full.State where - /-- - The stored data as ByteArray. `some` if not yet consumed, `none` if already consumed or empty. - -/ - data : Option ByteArray - - /-- - Whether the body has been closed. - -/ - closed : Bool -deriving Nonempty - -/-- -A fully-buffered body that stores data as a `ByteArray`. The data can be consumed exactly once -via `recv`. After consumption, subsequent `recv` calls return `none`. --/ -structure Full where - private mk :: - private state : Mutex Full.State -deriving Nonempty - -namespace Full - -/-- -Creates a new `Full` body containing the given data converted to `ByteArray`. --/ -def new [ToByteArray β] (data : β) : Async Full := do - return { state := ← Mutex.new { data := some (ToByteArray.toByteArray data), closed := false } } - -/-- -Creates an empty `Full` body with no data. --/ -def empty : Async Full := do - return { state := ← Mutex.new { data := none, closed := true } } - -/-- -Closes a `Full` --/ -def close (full : Full) : Async Unit := do - full.state.atomically do - modify ({ · with closed := true }) - -/-- -Non-blocking receive. Returns the stored `ByteArray` if available and not yet consumed, -or `none` if the body is empty or already consumed. --/ -def recv? 
(full : Full) : Async (Option ByteArray) := do - full.state.atomically do - let st ← get - match st.data with - | some data => - set { st with data := none, closed := true } - return some data - | none => - return none - -/-- -Blocking receive. Since `Full` bodies are already fully buffered, this behaves the same as `recv?`. -Returns the stored `ByteArray` if available, or `none` if consumed or empty. -The amount parameter is ignored for fully-buffered bodies. --/ -def recv (full : Full) (_count : Option UInt64) : Async (Option ByteArray) := - full.recv? - -/-- -Sends data to the body, replacing any previously stored data. --/ -def send (full : Full) (data : ByteArray) : Async Unit := do - full.state.atomically do - modify fun st => { st with data := some data, closed := false } - -/-- -Checks if the body is closed (consumed or empty). --/ -def isClosed (full : Full) : Async Bool := do - full.state.atomically do - return (← get).closed - -/-- -Returns the known size of the body if data is available. --/ -def size? (full : Full) : Async (Option Body.Length) := do - full.state.atomically do - let st ← get - match st.data with - | some data => return some (.fixed data.size) - | none => return none - -open Internal.IO.Async in - -/-- -Creates a `Selector` that resolves once the `Full` body has data available and provides that -data as a `Chunk`. Returns `none` when the body is closed. --/ -def recvSelector (full : Full) : Selector (Option Chunk) where - tryFn := do - full.state.atomically do - let st ← get - match st.data with - | some data => - set { st with data := none, closed := true } - return some (some (Chunk.ofByteArray data)) - | none => - if st.closed then return some none - else return none - - registerFn waiter := do - let lose := return () - let win promise := do - let r ← full.recv? 
- match r with - | some data => promise.resolve (.ok (some (Chunk.ofByteArray data))) - | none => promise.resolve (.ok none) - waiter.race lose win - - unregisterFn := pure () - -end Body.Full - -namespace Request.Builder -open Internal.IO.Async - -/-- -Builds a request with a text body. Sets Content-Type to text/plain and Content-Length automatically. --/ -def text (builder : Builder) (content : String) : Async (Request Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a request with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. --/ -def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do - let body ← Body.Full.new content - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a request with a JSON body. Sets Content-Type to application/json and Content-Length automatically. --/ -def json (builder : Builder) (content : String) : Async (Request Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a request with an HTML body. Sets Content-Type to text/html and Content-Length automatically. 
--/ -def html (builder : Builder) (content : String) : Async (Request Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a request with an empty Full body. --/ -def noBody (builder : Builder) : Async (Request Body.Full) := do - let body ← Body.Full.empty - return { head := builder.head, body } - -end Request.Builder - -namespace Response.Builder -open Internal.IO.Async - -/-- -Builds a response with a text body. Sets Content-Type to text/plain and Content-Length automatically. --/ -def text (builder : Builder) (content : String) : Async (Response Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a response with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. --/ -def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do - let body ← Body.Full.new content - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a response with a JSON body. Sets Content-Type to application/json and Content-Length automatically. 
--/ -def json (builder : Builder) (content : String) : Async (Response Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a response with an HTML body. Sets Content-Type to text/html and Content-Length automatically. --/ -def html (builder : Builder) (content : String) : Async (Response Body.Full) := do - let bytes := content.toUTF8 - let body ← Body.Full.new bytes - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } - -/-- -Builds a response with an empty Full body. --/ -def noBody (builder : Builder) : Async (Response Body.Full) := do - let body ← Body.Full.empty - return { head := builder.head, body } - -end Response.Builder - -end Std.Http diff --git a/src/Std/Internal/Http/Data/Body/ChunkStream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean similarity index 50% rename from src/Std/Internal/Http/Data/Body/ChunkStream.lean rename to src/Std/Internal/Http/Data/Body/Stream.lean index 3a62c0d13bfb..f3a9f315e569 100644 --- a/src/Std/Internal/Http/Data/Body/ChunkStream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -8,16 +8,20 @@ module prelude public import Std.Sync public import Std.Internal.Async +public import Std.Internal.Http.Data.Request +public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Body.Basic public import Std.Internal.Http.Data.Body.Length public import Init.Data.Queue +public import Init.Data.ByteArray public section /-! 
-# ChunkStream +# Body.Stream -A `ChunkStream` represents an asynchronous channel for streaming data in chunks. It provides an +A `Stream` represents an asynchronous channel for streaming data in chunks. It provides an interface for producers and consumers to exchange chunks with optional metadata (extensions), making it suitable for HTTP chunked transfer encoding and other streaming scenarios. -/ @@ -27,7 +31,7 @@ open Std Internal IO Async set_option linter.all true -namespace ChunkStream +namespace Stream open Internal.IO.Async in @@ -88,22 +92,22 @@ private structure State where knownSize : Option Body.Length deriving Nonempty -end ChunkStream +end Stream /-- A channel for chunks with support for chunk extensions. -/ -structure ChunkStream where +structure Stream where private mk :: - private state : Mutex ChunkStream.State -deriving Nonempty + private state : Mutex Stream.State +deriving Nonempty, TypeName -namespace ChunkStream +namespace Stream /-- -Creates a new ChunkStream with a specified capacity. +Creates a new Stream with a specified capacity. -/ -def emptyWithCapacity (capacity : Nat := 128) : Async ChunkStream := do +def emptyWithCapacity (capacity : Nat := 128) : Async Stream := do return { state := ← Mutex.new { values := ∅ @@ -117,10 +121,10 @@ def emptyWithCapacity (capacity : Nat := 128) : Async ChunkStream := do } /-- -Creates a new ChunkStream with default capacity. +Creates a new Stream with default capacity. -/ @[always_inline, inline] -def empty : Async ChunkStream := +def empty : Async Stream := emptyWithCapacity private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length := @@ -161,11 +165,11 @@ private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT Base Attempts to receive a chunk from the stream. Returns `some` with a chunk when data is available, or `none` when the stream is closed or no data is available. 
-/ -def tryRecv (stream : ChunkStream) : Async (Option Chunk) := +def tryRecv (stream : Stream) : Async (Option Chunk) := stream.state.atomically do tryRecv' -private def recv' (stream : ChunkStream) : BaseIO (Task (Option Chunk)) := do +private def recv' (stream : Stream) : BaseIO (Task (Option Chunk)) := do stream.state.atomically do if let some chunk ← tryRecv' then return .pure <| some chunk @@ -180,7 +184,7 @@ private def recv' (stream : ChunkStream) : BaseIO (Task (Option Chunk)) := do Receives a chunk from the stream. Blocks if no data is available yet. Returns `none` if the stream is closed and no data is available. The amount parameter is ignored for chunk streams. -/ -def recv (stream : ChunkStream) (_count : Option UInt64) : Async (Option Chunk) := do +def recv (stream : Stream) (_count : Option UInt64) : Async (Option Chunk) := do Async.ofTask (← recv' stream) private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do @@ -203,14 +207,14 @@ private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do return false return true -private def trySend (stream : ChunkStream) (chunk : Chunk) : BaseIO Bool := do +private def trySend (stream : Stream) (chunk : Chunk) : BaseIO Bool := do stream.state.atomically do if (← get).closed then return false else trySend' chunk -private def send' (stream : ChunkStream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do +private def send' (stream : Stream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do stream.state.atomically do if (← get).closed then return .pure <| .error (.userError "channel closed") @@ -226,7 +230,7 @@ private def send' (stream : ChunkStream) (chunk : Chunk) : BaseIO (Task (Except /-- Sends a chunk to the stream. Blocks if the buffer is full. 
-/ -def send (stream : ChunkStream) (chunk : Chunk) : Async Unit := do +def send (stream : Stream) (chunk : Chunk) : Async Unit := do if chunk.data.isEmpty then return @@ -237,7 +241,7 @@ def send (stream : ChunkStream) (chunk : Chunk) : Async Unit := do Gets the known size of the stream if available. Returns `none` if the size is not known. -/ @[always_inline, inline] -def getKnownSize (stream : ChunkStream) : Async (Option Body.Length) := do +def getKnownSize (stream : Stream) : Async (Option Body.Length) := do stream.state.atomically do return (← get).knownSize @@ -245,7 +249,7 @@ def getKnownSize (stream : ChunkStream) : Async (Option Body.Length) := do Sets the known size of the stream. Use this when the total expected size is known ahead of time. -/ @[always_inline, inline] -def setKnownSize (stream : ChunkStream) (size : Option Body.Length) : Async Unit := do +def setKnownSize (stream : Stream) (size : Option Body.Length) : Async Unit := do stream.state.atomically do modify fun st => { st with knownSize := size } @@ -253,7 +257,7 @@ def setKnownSize (stream : ChunkStream) (size : Option Body.Length) : Async Unit Closes the stream, preventing further sends and causing pending/future recv operations to return `none` when no data is available. -/ -def close (stream : ChunkStream) : Async Unit := do +def close (stream : Stream) : Async Unit := do stream.state.atomically do let st ← get if st.closed then return () @@ -267,7 +271,7 @@ def close (stream : ChunkStream) : Async Unit := do Checks if the stream is closed. -/ @[always_inline, inline] -def isClosed (stream : ChunkStream) : Async Bool := do +def isClosed (stream : Stream) : Async Bool := do stream.state.atomically do return (← get).closed @@ -280,9 +284,9 @@ private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : open Internal.IO.Async in /-- -Creates a `Selector` that resolves once the `ChunkStream` has data available and provides that data. 
+Creates a `Selector` that resolves once the `Stream` has data available and provides that data. -/ -def recvSelector (stream : ChunkStream) : Selector (Option Chunk) where +def recvSelector (stream : Stream) : Selector (Option Chunk) where tryFn := do stream.state.atomically do if ← recvReady' then @@ -311,15 +315,21 @@ def recvSelector (stream : ChunkStream) : Selector (Option Chunk) where | .select waiter => return !(← waiter.checkFinished) set { st with consumers } +/-- +Sends data to the stream and writes a chunk to it. +-/ +def writeChunk (stream : Stream) (chunk : Chunk) : Async Unit := + stream.send chunk + /-- Iterate over the stream content in chunks, processing each chunk with the given step function. -/ @[inline] protected partial def forIn - {β : Type} (stream : ChunkStream) (acc : β) + {β : Type} (stream : Stream) (acc : β) (step : Chunk → β → Async (ForInStep β)) : Async β := do - let rec @[specialize] loop (stream : ChunkStream) (acc : β) : Async β := do + let rec @[specialize] loop (stream : Stream) (acc : β) : Async β := do if let some chunk ← stream.recv none then match ← step chunk acc with | .done res => return res @@ -334,10 +344,10 @@ Iterate over the stream content in chunks, processing each chunk with the given -/ @[inline] protected partial def forIn' - {β : Type} (stream : ChunkStream) (acc : β) + {β : Type} (stream : Stream) (acc : β) (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do - let rec @[specialize] loop (stream : ChunkStream) (acc : β) : ContextAsync β := do + let rec @[specialize] loop (stream : Stream) (acc : β) : ContextAsync β := do let data ← Selectable.one #[ .case (stream.recvSelector) pure, .case (← ContextAsync.doneSelector) (fun _ => pure none), @@ -352,10 +362,178 @@ protected partial def forIn' loop stream acc -instance : ForIn Async ChunkStream Chunk where - forIn := Std.Http.Body.ChunkStream.forIn +instance : ForIn Async Stream Chunk where + forIn := Std.Http.Body.Stream.forIn + +instance : 
ForIn ContextAsync Stream Chunk where + forIn := Std.Http.Body.Stream.forIn' + +end Std.Http.Body.Stream + +namespace Std.Http.Request.Builder +open Internal.IO.Async + +/-- +Builds a request with a streaming body. The generator function receives the `Stream` and +can write chunks to it asynchronously. +-/ +def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Request Body.Stream) := do + let body ← Body.Stream.empty + background (gen body) + return { head := builder.head, body } + +/-- +Builds a request with an empty body. +-/ +def blank (builder : Builder) : Async (Request Body.Stream) := do + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed 0)) + body.close + return { head := builder.head, body } + +/-- +Builds a request with a text body. Sets Content-Type to text/plain and Content-Length automatically. +-/ +def text (builder : Builder) (content : String) : Async (Request Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Stream) := do + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed content.size)) + body.send (Chunk.ofByteArray content) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") + |>.insert Header.Name.contentLength (Header.Value.ofString! 
(toString content.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with a JSON body. Sets Content-Type to application/json and Content-Length automatically. +-/ +def json (builder : Builder) (content : String) : Async (Request Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with an HTML body. Sets Content-Type to text/html and Content-Length automatically. +-/ +def html (builder : Builder) (content : String) : Async (Request Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a request with an empty body (alias for blank). +-/ +def noBody (builder : Builder) : Async (Request Body.Stream) := + builder.blank + +end Std.Http.Request.Builder -instance : ForIn ContextAsync ChunkStream Chunk where - forIn := Std.Http.Body.ChunkStream.forIn' +namespace Std.Http.Response.Builder +open Internal.IO.Async + +/-- +Builds a response with a streaming body. The generator function receives the `Stream` and +can write chunks to it asynchronously. 
+-/ +def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Response Body.Stream) := do + let body ← Body.Stream.empty + background (gen body) + return { head := builder.head, body } + +/-- +Builds a response with an empty body. +-/ +def blank (builder : Builder) : Async (Response Body.Stream) := do + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed 0)) + body.close + return { head := builder.head, body } + +/-- +Builds a response with a text body. Sets Content-Type to text/plain and Content-Length automatically. +-/ +def text (builder : Builder) (content : String) : Async (Response Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Stream) := do + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed content.size)) + body.send (Chunk.ofByteArray content) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with a JSON body. Sets Content-Type to application/json and Content-Length automatically. 
+-/ +def json (builder : Builder) (content : String) : Async (Response Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with an HTML body. Sets Content-Type to text/html and Content-Length automatically. +-/ +def html (builder : Builder) (content : String) : Async (Response Body.Stream) := do + let bytes := content.toUTF8 + let body ← Body.Stream.empty + body.setKnownSize (some (.fixed bytes.size)) + body.send (Chunk.ofByteArray bytes) + body.close + let headers := builder.head.headers + |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) + return { head := { builder.head with headers }, body } + +/-- +Builds a response with an empty body (alias for blank). +-/ +def noBody (builder : Builder) : Async (Response Body.Stream) := + builder.blank -end Std.Http.Body.ChunkStream +end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean index d2c4c871c7ab..e3f3eb54e917 100644 --- a/src/Std/Internal/Http/Internal/ChunkedBuffer.lean +++ b/src/Std/Internal/Http/Internal/ChunkedBuffer.lean @@ -68,6 +68,7 @@ Writes a `ChunkedBuffer` to the `ChunkedBuffer`. 
-/ @[inline] def append (buffer : ChunkedBuffer) (data : ChunkedBuffer) : ChunkedBuffer := + -- Queue.enqueueAll prepends to eList, so reverse to maintain FIFO order { data := buffer.data.enqueueAll data.data.toArray.toList.reverse, size := buffer.size + data.size } /-- @@ -89,13 +90,12 @@ def writeString (buffer : ChunkedBuffer) (data : String) : ChunkedBuffer := Turn the combined structure into a single contiguous ByteArray. -/ @[inline] -def toByteArray (c : ChunkedBuffer) : ByteArray := - let c := c.data.toArray - - if h : 1 = c.size then - c[0]'(Nat.le_of_eq h) +def toByteArray (cb : ChunkedBuffer) : ByteArray := + let arr := cb.data.toArray + if h : 1 = arr.size then + arr[0]'(Nat.le_of_eq h) else - c.foldl (· ++ ·) (.emptyWithCapacity c.size) + arr.foldl (· ++ ·) (.emptyWithCapacity cb.size) /-- Build from a ByteArray directly. diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index ae7dd303e449..32e74024727d 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -4,58 +4,58 @@ open Std.Internal.IO Async open Std.Http open Std.Http.Body -/-! ## ChunkStream tests -/ +/-! ## Stream tests -/ -- Test send followed by recv returns the chunk -def chunkSendRecv : Async Unit := do - let stream ← ChunkStream.empty +def streamSendRecv : Async Unit := do + let stream ← Stream.empty let chunk := Chunk.ofByteArray "hello".toUTF8 stream.send chunk let result ← stream.recv none assert! result.isSome assert! result.get!.data == "hello".toUTF8 -#eval chunkSendRecv.block +#eval streamSendRecv.block -- Test tryRecv on empty stream returns none -def chunkTryRecvEmpty : Async Unit := do - let stream ← ChunkStream.empty +def streamTryRecvEmpty : Async Unit := do + let stream ← Stream.empty let result ← stream.tryRecv assert! 
result.isNone -#eval chunkTryRecvEmpty.block +#eval streamTryRecvEmpty.block -- Test tryRecv returns data when available -def chunkTryRecvWithData : Async Unit := do - let stream ← ChunkStream.empty +def streamTryRecvWithData : Async Unit := do + let stream ← Stream.empty stream.send (Chunk.ofByteArray "data".toUTF8) let result ← stream.tryRecv assert! result.isSome assert! result.get!.data == "data".toUTF8 -#eval chunkTryRecvWithData.block +#eval streamTryRecvWithData.block -- Test close sets the closed flag -def chunkClose : Async Unit := do - let stream ← ChunkStream.empty +def streamClose : Async Unit := do + let stream ← Stream.empty assert! !(← stream.isClosed) stream.close assert! (← stream.isClosed) -#eval chunkClose.block +#eval streamClose.block -- Test recv on closed stream returns none -def chunkRecvAfterClose : Async Unit := do - let stream ← ChunkStream.empty +def streamRecvAfterClose : Async Unit := do + let stream ← Stream.empty stream.close let result ← stream.recv none assert! result.isNone -#eval chunkRecvAfterClose.block +#eval streamRecvAfterClose.block -- Test FIFO ordering of multiple chunks -def chunkMultipleFIFO : Async Unit := do - let stream ← ChunkStream.empty +def streamMultipleFIFO : Async Unit := do + let stream ← Stream.empty stream.send (Chunk.ofByteArray "one".toUTF8) stream.send (Chunk.ofByteArray "two".toUTF8) stream.send (Chunk.ofByteArray "three".toUTF8) @@ -66,11 +66,11 @@ def chunkMultipleFIFO : Async Unit := do assert! r2.get!.data == "two".toUTF8 assert! r3.get!.data == "three".toUTF8 -#eval chunkMultipleFIFO.block +#eval streamMultipleFIFO.block -- Test for-in iteration collects all chunks until close -def chunkForIn : Async Unit := do - let stream ← ChunkStream.empty +def streamForIn : Async Unit := do + let stream ← Stream.empty stream.send (Chunk.ofByteArray "a".toUTF8) stream.send (Chunk.ofByteArray "b".toUTF8) stream.close @@ -80,11 +80,11 @@ def chunkForIn : Async Unit := do acc := acc ++ chunk.data assert! 
acc == "ab".toUTF8 -#eval chunkForIn.block +#eval streamForIn.block -- Test chunks preserve extensions -def chunkExtensions : Async Unit := do - let stream ← ChunkStream.empty +def streamExtensions : Async Unit := do + let stream ← Stream.empty let chunk := { data := "hello".toUTF8, extensions := #[("key", some "value")] : Chunk } stream.send chunk let result ← stream.recv none @@ -92,20 +92,20 @@ def chunkExtensions : Async Unit := do assert! result.get!.extensions.size == 1 assert! result.get!.extensions[0]! == ("key", some "value") -#eval chunkExtensions.block +#eval streamExtensions.block -- Test set/get known size -def chunkKnownSize : Async Unit := do - let stream ← ChunkStream.empty +def streamKnownSize : Async Unit := do + let stream ← Stream.empty stream.setKnownSize (some (.fixed 100)) let size ← stream.getKnownSize assert! size == some (.fixed 100) -#eval chunkKnownSize.block +#eval streamKnownSize.block -- Test capacity: filling up to capacity succeeds via tryRecv check -def chunkCapacityFull : Async Unit := do - let stream ← ChunkStream.emptyWithCapacity (capacity := 3) +def streamCapacityFull : Async Unit := do + let stream ← Stream.emptyWithCapacity (capacity := 3) stream.send (Chunk.ofByteArray "a".toUTF8) stream.send (Chunk.ofByteArray "b".toUTF8) stream.send (Chunk.ofByteArray "c".toUTF8) @@ -119,11 +119,11 @@ def chunkCapacityFull : Async Unit := do assert! r3.get!.data == "c".toUTF8 assert! r4.isNone -#eval chunkCapacityFull.block +#eval streamCapacityFull.block -- Test capacity: send blocks when buffer is full and resumes after recv -def chunkCapacityBackpressure : Async Unit := do - let stream ← ChunkStream.emptyWithCapacity (capacity := 2) +def streamCapacityBackpressure : Async Unit := do + let stream ← Stream.emptyWithCapacity (capacity := 2) stream.send (Chunk.ofByteArray "a".toUTF8) stream.send (Chunk.ofByteArray "b".toUTF8) @@ -144,11 +144,11 @@ def chunkCapacityBackpressure : Async Unit := do assert! r2.get!.data == "b".toUTF8 assert! 
r3.get!.data == "c".toUTF8 -#eval chunkCapacityBackpressure.block +#eval streamCapacityBackpressure.block -- Test capacity 1: only one chunk at a time -def chunkCapacityOne : Async Unit := do - let stream ← ChunkStream.emptyWithCapacity (capacity := 1) +def streamCapacityOne : Async Unit := do + let stream ← Stream.emptyWithCapacity (capacity := 1) stream.send (Chunk.ofByteArray "first".toUTF8) let sendTask ← async (t := AsyncTask) <| @@ -162,11 +162,11 @@ def chunkCapacityOne : Async Unit := do let r2 ← stream.recv none assert! r2.get!.data == "second".toUTF8 -#eval chunkCapacityOne.block +#eval streamCapacityOne.block -- Test close unblocks pending producers -def chunkCloseUnblocksProducers : Async Unit := do - let stream ← ChunkStream.emptyWithCapacity (capacity := 1) +def streamCloseUnblocksProducers : Async Unit := do + let stream ← Stream.emptyWithCapacity (capacity := 1) stream.send (Chunk.ofByteArray "fill".toUTF8) -- This send should block because buffer is full @@ -181,160 +181,9 @@ def chunkCloseUnblocksProducers : Async Unit := do await sendTask -#eval chunkCloseUnblocksProducers.block +#eval streamCloseUnblocksProducers.block - -/-! ## Full tests -/ - --- Test ofData followed by recv -def fullOfData : Async Unit := do - let full ← Full.new "hello".toUTF8 - let result ← full.recv none - assert! result.isSome - assert! result.get! == "hello".toUTF8 - -#eval fullOfData.block - --- Test data is consumed exactly once -def fullConsumedOnce : Async Unit := do - let full ← Full.new "data".toUTF8 - let r1 ← full.recv none - let r2 ← full.recv none - assert! r1.isSome - assert! r2.isNone - -#eval fullConsumedOnce.block - --- Test empty Full returns none immediately -def fullEmpty : Async Unit := do - let full ← Full.empty - let result ← full.recv none - assert! result.isNone - -#eval fullEmpty.block - --- Test isClosed transitions after consumption -def fullClosedAfterConsume : Async Unit := do - let full ← Full.new "data".toUTF8 - assert! 
!(← full.isClosed) - discard <| full.recv none - assert! (← full.isClosed) - -#eval fullClosedAfterConsume.block - --- Test empty Full is already closed -def fullEmptyIsClosed : Async Unit := do - let full ← Full.empty - assert! (← full.isClosed) - -#eval fullEmptyIsClosed.block - --- Test size? returns byte count -def fullSize : Async Unit := do - let full ← Full.new "hello".toUTF8 - let size ← full.size? - assert! size == some (.fixed 5) - -#eval fullSize.block - --- Test size? returns none after consumption -def fullSizeAfterConsume : Async Unit := do - let full ← Full.new "hello".toUTF8 - discard <| full.recv none - let size ← full.size? - assert! size == none - -#eval fullSizeAfterConsume.block - --- Test send replaces data -def fullSendReplacesData : Async Unit := do - let full ← Full.empty - full.send "new data".toUTF8 - assert! !(← full.isClosed) - let result ← full.recv none - assert! result.isSome - assert! result.get! == "new data".toUTF8 - -#eval fullSendReplacesData.block - --- Test recv? behaves the same as recv -def fullRecvQuestion : Async Unit := do - let full ← Full.new "test".toUTF8 - let r1 ← full.recv? - assert! r1.isSome - assert! r1.get! == "test".toUTF8 - let r2 ← full.recv? - assert! r2.isNone - -#eval fullRecvQuestion.block - --- Test Full from String type -def fullFromString : Async Unit := do - let full ← Full.new (β := String) "hello world" - let result ← full.recv none - assert! result.isSome - assert! result.get! == "hello world".toUTF8 - -#eval fullFromString.block - -/-! ## Body typeclass tests -/ - --- Test Body instance for ChunkStream -def bodyChunkStream : Async Unit := do - let stream : ChunkStream ← Body.empty - Body.send stream (Chunk.ofByteArray "hello".toUTF8) - let result ← Body.recv stream none - assert! result.isSome - assert! result.get!.data == "hello".toUTF8 - assert! 
!(← Body.isClosed stream) - -#eval bodyChunkStream.block - --- Test Body instance for Full -def bodyFull : Async Unit := do - let full ← Full.new "hello".toUTF8 - let result ← @Body.recv Body.Full Chunk _ full none - assert! result.isSome - assert! result.get!.data == "hello".toUTF8 - assert! (← @Body.isClosed Body.Full Chunk _ full) - -#eval bodyFull.block - -/-! ## Empty body tests -/ - --- Test Empty body recv returns none -def emptyRecv : Async Unit := do - let empty ← Body.Empty.new - let result ← empty.recv none - assert! result.isNone - -#eval emptyRecv.block - --- Test Empty body is always closed -def emptyIsClosed : Async Unit := do - let empty ← Body.Empty.new - assert! (← empty.isClosed) - -#eval emptyIsClosed.block - --- Test Empty body size is 0 -def emptySize : Async Unit := do - let empty ← Body.Empty.new - let size ← empty.size? - assert! size == some (.fixed 0) - -#eval emptySize.block - --- Test Body instance for Empty -def bodyEmpty : Async Unit := do - let empty : Body.Empty ← Body.empty - let result ← @Body.recv Body.Empty Chunk _ empty none - assert! result.isNone - assert! (← @Body.isClosed Body.Empty Chunk _ empty) - -#eval bodyEmpty.block - -/-! ## Request.Builder Full body tests -/ +/-! ## Request.Builder body tests -/ -- Test Request.Builder.text sets correct headers def requestBuilderText : Async Unit := do @@ -342,9 +191,9 @@ def requestBuilderText : Async Unit := do |>.text "Hello, World!" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") - let body ← req.body.recv? + let body ← req.body.tryRecv assert! body.isSome - assert! body.get! == "Hello, World!".toUTF8 + assert! body.get!.data == "Hello, World!".toUTF8 #eval requestBuilderText.block @@ -353,9 +202,9 @@ def requestBuilderJson : Async Unit := do let req ← Request.post (.originForm! 
"/api") |>.json "{\"key\": \"value\"}" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← req.body.recv? + let body ← req.body.tryRecv assert! body.isSome - assert! body.get! == "{\"key\": \"value\"}".toUTF8 + assert! body.get!.data == "{\"key\": \"value\"}".toUTF8 #eval requestBuilderJson.block @@ -366,9 +215,9 @@ def requestBuilderBytes : Async Unit := do |>.bytes data assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream") assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3") - let body ← req.body.recv? + let body ← req.body.tryRecv assert! body.isSome - assert! body.get! == data + assert! body.get!.data == data #eval requestBuilderBytes.block @@ -377,7 +226,7 @@ def requestBuilderHtml : Async Unit := do let req ← Request.post (.originForm! "/api") |>.html "" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8") - let body ← req.body.recv? + let body ← req.body.tryRecv assert! body.isSome #eval requestBuilderHtml.block @@ -386,12 +235,12 @@ def requestBuilderHtml : Async Unit := do def requestBuilderNoBody : Async Unit := do let req ← Request.get (.originForm! "/api") |>.noBody - let body ← req.body.recv? + let body ← req.body.tryRecv assert! body.isNone #eval requestBuilderNoBody.block -/-! ## Response.Builder Full body tests -/ +/-! ## Response.Builder body tests -/ -- Test Response.Builder.text sets correct headers def responseBuilderText : Async Unit := do @@ -399,9 +248,9 @@ def responseBuilderText : Async Unit := do |>.text "Hello, World!" assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") - let body ← res.body.recv? + let body ← res.body.tryRecv assert! body.isSome - assert! 
body.get! == "Hello, World!".toUTF8 + assert! body.get!.data == "Hello, World!".toUTF8 #eval responseBuilderText.block @@ -410,7 +259,7 @@ def responseBuilderJson : Async Unit := do let res ← Response.ok |>.json "{\"status\": \"ok\"}" assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← res.body.recv? + let body ← res.body.tryRecv assert! body.isSome #eval responseBuilderJson.block @@ -419,7 +268,7 @@ def responseBuilderJson : Async Unit := do def responseBuilderNoBody : Async Unit := do let res ← Response.ok |>.noBody - let body ← res.body.recv? + let body ← res.body.tryRecv assert! body.isNone #eval responseBuilderNoBody.block From 4c64f2c2e8ebc9f501cb84b44a61d7adb421d27d Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Mon, 9 Feb 2026 21:55:38 -0300 Subject: [PATCH 11/44] fix: suggestions --- src/Std/Internal/Http/Protocol/H1.lean | 27 +------------------ src/Std/Internal/Http/Protocol/H1/Config.lean | 2 +- .../Internal/Http/Protocol/H1/Message.lean | 23 +++------------- src/Std/Internal/Http/Protocol/H1/Reader.lean | 5 ++-- src/Std/Internal/Http/Protocol/H1/Writer.lean | 2 +- 5 files changed, 10 insertions(+), 49 deletions(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index 1cc79652c42d..197046637ff6 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -145,31 +145,6 @@ private def setFailure (machine : Machine dir) (error : H1.Error) : Machine dir private def updateKeepAlive (machine : Machine dir) (should : Bool) : Machine dir := { machine with keepAlive := machine.keepAlive ∧ should } --- Helper Functions - -private def isChunked (headers : Headers) : Option Bool := - if let some res := headers.get? 
Header.Name.transferEncoding then - let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) - if encodings.isEmpty then - none - else - let chunkedCount := encodings.filter (· == "chunked") |>.size - let lastIsChunked := encodings.back? == some "chunked" - - if chunkedCount > 1 then - none - else if chunkedCount = 1 ∧ ¬lastIsChunked then - none - else - some lastIsChunked - else - some false - -private def extractBodyLengthFromHeaders (headers : Headers) : Option Body.Length := - match (headers.get? Header.Name.contentLength, isChunked headers) with - | (some cl, some false) => cl.value.toNat? >>= (some ∘ Body.Length.fixed) - | (_, some true) => some Body.Length.chunked - | _ => none private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do match dir with @@ -404,7 +379,7 @@ def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir let machine := if machine.writer.knownSize.isNone then - match extractBodyLengthFromHeaders message.headers with + match message.getSize false with | some size => machine.setKnownSize size | none => machine else diff --git a/src/Std/Internal/Http/Protocol/H1/Config.lean b/src/Std/Internal/Http/Protocol/H1/Config.lean index 310c5f8df066..11fa9f178e12 100644 --- a/src/Std/Internal/Http/Protocol/H1/Config.lean +++ b/src/Std/Internal/Http/Protocol/H1/Config.lean @@ -47,7 +47,7 @@ structure Config where /-- The server name (for sending responses) or user agent (for sending requests) -/ - identityHeader : Option Header.Value := some (.new "LeanServer") + identityHeader : Option Header.Value := some (.mk "LeanServer") /-- Maximum length of HTTP method token (default: 16 bytes) diff --git a/src/Std/Internal/Http/Protocol/H1/Message.lean b/src/Std/Internal/Http/Protocol/H1/Message.lean index c4842a704b39..f4ab5896aaa0 100644 --- a/src/Std/Internal/Http/Protocol/H1/Message.lean +++ b/src/Std/Internal/Http/Protocol/H1/Message.lean @@ -73,24 +73,9 @@ def 
Message.Head.version (m : Message.Head dir) : Version := | .sending => Response.Head.version m private def isChunked (message : Message.Head dir) : Option Bool := - let headers := message.headers - - if let some res := headers.get? .transferEncoding then - let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower) - if encodings.isEmpty then - none - else - let chunkedCount := encodings.filter (· == "chunked") |>.size - let lastIsChunked := encodings.back? == some "chunked" - - if chunkedCount > 1 then - none - else if chunkedCount = 1 ∧ ¬lastIsChunked then - none - else - some lastIsChunked - else - some false + match message.headers.get? .transferEncoding with + | none => some false + | some v => Header.TransferEncoding.parse v |>.map (·.isChunked) /-- Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag. @@ -109,7 +94,7 @@ Checks whether the message indicates that the connection should be kept alive. -/ @[inline] def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool := - ¬message.headers.hasEntry .connection (.new "close") + ¬message.headers.hasEntry .connection (.mk "close") ∧ message.version = .v11 instance : Repr (Message.Head dir) := diff --git a/src/Std/Internal/Http/Protocol/H1/Reader.lean b/src/Std/Internal/Http/Protocol/H1/Reader.lean index ce2d7622a6f8..8c24ff18bc30 100644 --- a/src/Std/Internal/Http/Protocol/H1/Reader.lean +++ b/src/Std/Internal/Http/Protocol/H1/Reader.lean @@ -133,14 +133,15 @@ def hasFailed (reader : Reader dir) : Bool := /-- Feeds new data into the reader's input buffer. -If the current input is exhausted, replaces it; otherwise appends. +If the current input is exhausted, replaces it; otherwise compacts the buffer +by discarding already-parsed bytes before appending. 
-/ @[inline] def feed (data : ByteArray) (reader : Reader dir) : Reader dir := { reader with input := if reader.input.atEnd then data.iter - else { reader.input with array := reader.input.array ++ data } } + else (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter } /-- Replaces the reader's input iterator with a new one. diff --git a/src/Std/Internal/Http/Protocol/H1/Writer.lean b/src/Std/Internal/Http/Protocol/H1/Writer.lean index afbc77922ee4..c4a43438996b 100644 --- a/src/Std/Internal/Http/Protocol/H1/Writer.lean +++ b/src/Std/Internal/Http/Protocol/H1/Writer.lean @@ -170,7 +170,7 @@ def determineTransferMode (writer : Writer dir) : Body.Length := if let some mode := writer.knownSize then mode else if writer.userClosedBody then - let size := writer.userData.foldl (fun x y => x + y.size) 0 + let size := writer.userData.foldl (fun x y => x + y.data.size) 0 .fixed size else .chunked From 5e3dce808865e7639ae3d2c18c95929bdcac6830 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Mon, 9 Feb 2026 21:57:26 -0300 Subject: [PATCH 12/44] fix: chunk stream will only deal with content-size of the chunks not with the wireFormatSize --- src/Std/Internal/Http/Data/Body/Stream.lean | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index f3a9f315e569..7bce56ed6bc6 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -129,7 +129,7 @@ def empty : Async Stream := private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length := match knownSize with - | some (.fixed res) => some (Body.Length.fixed (res - chunk.wireFormatSize)) + | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size)) | _ => knownSize private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : From 
9ad4ee304be2d3488d63093806bc4c2d6f384c48 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 10 Feb 2026 17:29:04 -0300 Subject: [PATCH 13/44] fix: imports --- src/Std/Internal/Http/Protocol/H1/Message.lean | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Std/Internal/Http/Protocol/H1/Message.lean b/src/Std/Internal/Http/Protocol/H1/Message.lean index f4ab5896aaa0..ebd96b71609a 100644 --- a/src/Std/Internal/Http/Protocol/H1/Message.lean +++ b/src/Std/Internal/Http/Protocol/H1/Message.lean @@ -6,6 +6,7 @@ Authors: Sofia Rodrigues module prelude +import Init.Data.Array public import Std.Internal.Http.Data public section From d004e175e253ad4bb75c127613a3d50591fc59b8 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 17:03:27 -0300 Subject: [PATCH 14/44] fix: error message --- src/Std/Internal/Http/Protocol/H1.lean | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index 197046637ff6..5e79c9513944 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -481,10 +481,14 @@ partial def processWrite (machine : Machine dir) : Machine dir := private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Machine dir := let machine : Machine dir := match dir with - | .receiving => machine + | .receiving => + if machine.isWaitingMessage then + machine |>.setWriterState .waitingHeaders |>.disableKeepAlive |>.send { status := .badRequest } |>.userClosedBody + else + machine | .sending => machine machine From f02139f7cee23fc39c85e1f0c301bed2912ed8dc Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 17:14:52 -0300 Subject: [PATCH 15/44] fix: skipBytes --- src/Std/Internal/Parsec/ByteArray.lean | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/src/Std/Internal/Parsec/ByteArray.lean b/src/Std/Internal/Parsec/ByteArray.lean index 
fdc4067e62fa..787191384fbb 100644 --- a/src/Std/Internal/Parsec/ByteArray.lean +++ b/src/Std/Internal/Parsec/ByteArray.lean @@ -58,17 +58,20 @@ def skipByte (b : UInt8) : Parser Unit := Skip a sequence of bytes equal to the given `ByteArray`. -/ def skipBytes (arr : ByteArray) : Parser Unit := fun it => - if it.remainingBytes < arr.size then - .error it .eof - else - let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator := - if h : idx < arr.size then - match skipByte arr[idx] it with - | .success it' _ => go (idx + 1) it' - | .error it' err => .error it' err + let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator := + if h : idx < arr.size then + if hnext : it.hasNext then + let got := it.curr' hnext + let want := arr[idx] + if got = want then + go (idx + 1) (it.next' hnext) + else + .error it (.other s!"expected byte {want}, got {got}") else - .success it () - go 0 it + .error it (.other s!"unexpected end of input while matching {arr.size} bytes") + else + .success it () + go 0 it /-- Parse a string by matching its UTF-8 bytes, returns the string on success. 
From 2bc2080fbed54563fbb4b699b5499423da7b67d0 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 17:40:19 -0300 Subject: [PATCH 16/44] fix: bad request behavior --- src/Std/Internal/Http/Protocol/H1.lean | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index 5e79c9513944..e58b7a61d767 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -482,7 +482,7 @@ private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Mach let machine : Machine dir := match dir with | .receiving => - if machine.isWaitingMessage then + if ¬machine.writer.sentMessage ∧ ¬machine.writer.isClosed then machine |>.setWriterState .waitingHeaders |>.disableKeepAlive From 3e9674eaa9aaf25e94f93e7f187093c59bcc41a8 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 18:08:16 -0300 Subject: [PATCH 17/44] feat: avoid more than one host --- src/Std/Internal/Http/Protocol/H1.lean | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index e58b7a61d767..cfe9b16afbc6 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -148,7 +148,9 @@ private def updateKeepAlive (machine : Machine dir) (should : Bool) : Machine di private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do match dir with - | .receiving => guard (message.headers.get? Header.Name.host |>.isSome) + | .receiving => do + let headers ← message.headers.getAll? 
Header.Name.host + guard (headers.size = 1) | .sending => pure () if let .receiving := dir then From 10c8a923e6957d04a14428959cdb35749d651120 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 18:48:10 -0300 Subject: [PATCH 18/44] feat: readAll functions --- src/Std/Internal/Http/Data/Body/Stream.lean | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 7bce56ed6bc6..5a116dec5420 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -368,6 +368,23 @@ instance : ForIn Async Stream Chunk where instance : ForIn ContextAsync Stream Chunk where forIn := Std.Http.Body.Stream.forIn' +/-- +Reads all remaining chunks from the stream and returns their concatenated data as a `ByteArray`. +Blocks until the stream is closed. +-/ +partial def readAll (stream : Stream) : ContextAsync ByteArray := do + let mut result := ByteArray.empty + for chunk in stream do + result := result ++ chunk.data + return result + +/-- +Reads all remaining chunks from the stream and returns their concatenated data as a `String`. +Blocks until the stream is closed. The data is interpreted as UTF-8. +-/ +partial def readAllString (stream : Stream) : ContextAsync String := do + return String.fromUTF8! 
(← stream.readAll) + end Std.Http.Body.Stream namespace Std.Http.Request.Builder From 7a1f8b2d305e45c5f8297843f09631eacfd8c6c3 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 19:09:45 -0300 Subject: [PATCH 19/44] fix: readAll --- src/Std/Internal/Http/Data/Body/Basic.lean | 20 ++++++++++++++++++++ src/Std/Internal/Http/Data/Body/Stream.lean | 18 ++++++++---------- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Basic.lean b/src/Std/Internal/Http/Data/Body/Basic.lean index 6fd614a97009..54ef13b51690 100644 --- a/src/Std/Internal/Http/Data/Body/Basic.lean +++ b/src/Std/Internal/Http/Data/Body/Basic.lean @@ -38,4 +38,24 @@ instance : ToByteArray ByteArray where instance : ToByteArray String where toByteArray := String.toUTF8 +/-- +Typeclass for types that can be decoded from a `ByteArray`. The conversion may fail with an error +message if the bytes are not valid for the target type. +-/ +class FromByteArray (α : Type) where + + /-- + Attempts to decode a `ByteArray` into the target type, returning an error message on failure. + -/ + fromByteArray : ByteArray → Except String α + +instance : FromByteArray ByteArray where + fromByteArray := .ok + +instance : FromByteArray String where + fromByteArray bs := + match String.fromUTF8? bs with + | some s => .ok s + | none => .error "invalid UTF-8 encoding" + end Std.Http.Body diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 5a116dec5420..1225e2f49cca 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -369,21 +369,19 @@ instance : ForIn ContextAsync Stream Chunk where forIn := Std.Http.Body.Stream.forIn' /-- -Reads all remaining chunks from the stream and returns their concatenated data as a `ByteArray`. -Blocks until the stream is closed. 
+Reads all remaining chunks from the stream and returns the concatenated data decoded as type `α` +using `FromByteArray`. Blocks until the stream is closed. Throws an `IO.Error` if the conversion +fails. -/ -partial def readAll (stream : Stream) : ContextAsync ByteArray := do +partial def readAllAs [FromByteArray α] (stream : Stream) : ContextAsync α := do let mut result := ByteArray.empty + for chunk in stream do result := result ++ chunk.data - return result -/-- -Reads all remaining chunks from the stream and returns their concatenated data as a `String`. -Blocks until the stream is closed. The data is interpreted as UTF-8. --/ -partial def readAllString (stream : Stream) : ContextAsync String := do - return String.fromUTF8! (← stream.readAll) + match FromByteArray.fromByteArray (α := α) result with + | .ok a => return a + | .error msg => throw (.userError msg) end Std.Http.Body.Stream From 9485e8f5ebcae0bf3c40323c60edecbedeec4336 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 19:35:31 -0300 Subject: [PATCH 20/44] revert: add toString head --- src/Std/Internal/Http/Data/Request.lean | 9 +++++++++ src/Std/Internal/Http/Data/Response.lean | 8 ++++++++ 2 files changed, 17 insertions(+) diff --git a/src/Std/Internal/Http/Data/Request.lean b/src/Std/Internal/Http/Data/Request.lean index 9ef8dd2b70b5..734fe063d96b 100644 --- a/src/Std/Internal/Http/Data/Request.lean +++ b/src/Std/Internal/Http/Data/Request.lean @@ -86,6 +86,15 @@ structure Request.Builder where namespace Request +instance : ToString Head where + toString req := + toString req.method ++ " " ++ + toString req.uri ++ " " ++ + toString req.version ++ + "\r\n" ++ + toString req.headers ++ + "\r\n" + open Internal in instance : Encode .v11 Head where encode buffer req := diff --git a/src/Std/Internal/Http/Data/Response.lean b/src/Std/Internal/Http/Data/Response.lean index 243377432a20..07bc46135302 100644 --- a/src/Std/Internal/Http/Data/Response.lean +++ 
b/src/Std/Internal/Http/Data/Response.lean @@ -85,6 +85,14 @@ structure Response.Builder where namespace Response +instance : ToString Head where + toString r := + toString r.version ++ " " ++ + toString r.status.toCode ++ " " ++ + toString r.status ++ "\r\n" ++ + toString r.headers ++ + "\r\n" + open Internal in instance : Encode .v11 Head where encode buffer r := From 8fe2d519d29e69aedf2c7ac25f5e0a2408bcc8bc Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 11 Feb 2026 19:40:34 -0300 Subject: [PATCH 21/44] revert: chunk changes --- src/Std/Internal/Http/Data/Chunk.lean | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/src/Std/Internal/Http/Data/Chunk.lean b/src/Std/Internal/Http/Data/Chunk.lean index abc3f203b342..d019bea44544 100644 --- a/src/Std/Internal/Http/Data/Chunk.lean +++ b/src/Std/Internal/Http/Data/Chunk.lean @@ -75,16 +75,6 @@ instance : Encode .v11 Chunk where let size := Nat.toDigits 16 chunkLen |>.toArray |>.map Char.toUInt8 |> ByteArray.mk buffer.append #[size, exts.toUTF8, "\r\n".toUTF8, chunk.data, "\r\n".toUTF8] -/-- -Returns the total wire format size of the chunk in bytes. This includes the hex-encoded data length -prefix, formatted extensions (`;name=value`), CRLF after the size line, the data itself, and the -trailing CRLF. 
--/ -def wireFormatSize (chunk : Chunk) : Nat := - let hexSize := (Nat.toDigits 16 chunk.data.size).length - let extensionsSize := chunk.extensions.foldl (fun acc (name, value) => acc + name.length + (value.map (fun v => v.length + 1) |>.getD 0) + 1) 0 - hexSize + extensionsSize + 2 + chunk.data.size + 2 - end Chunk /-- @@ -116,12 +106,10 @@ def header (trailer : Trailer) (key : String) (value : String) : Trailer := instance : Encode .v11 Trailer where encode buffer trailer := - let buffer := buffer.write "0\r\n".toUTF8 - - let buffer := trailer.headers.fold (init := buffer) fun acc key values => + let terminalChunk := "0\r\n".toUTF8 + let trailerFields := trailer.headers.fold (init := ByteArray.empty) fun acc key values => values.foldl (init := acc) fun acc value => - acc.write (key ++ ": " ++ value ++ "\r\n").toUTF8 - - buffer.write "\r\n".toUTF8 + acc ++ (key ++ ": " ++ value ++ "\r\n").toUTF8 + buffer.append #[terminalChunk, trailerFields, "\r\n".toUTF8] end Trailer From 058d95e44120f474ca3ec0583259f875eacbd608 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Thu, 12 Feb 2026 10:46:43 -0300 Subject: [PATCH 22/44] feat: maximum size in readAll --- src/Std/Internal/Http/Data/Body/Stream.lean | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 1225e2f49cca..30245ef2f9e3 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -369,17 +369,20 @@ instance : ForIn ContextAsync Stream Chunk where forIn := Std.Http.Body.Stream.forIn' /-- -Reads all remaining chunks from the stream and returns the concatenated data decoded as type `α` -using `FromByteArray`. Blocks until the stream is closed. Throws an `IO.Error` if the conversion -fails. +Reads all remaining chunks from the stream and returns the concatenated data as a `ByteArray`. +Blocks until the stream is closed. 
If `maximumSize` is provided, throws an `IO.Error` if the +total data exceeds that limit. -/ -partial def readAllAs [FromByteArray α] (stream : Stream) : ContextAsync α := do +partial def readAll [FromByteArray α] (stream : Stream) (maximumSize : Option UInt64 := none) : ContextAsync α := do let mut result := ByteArray.empty for chunk in stream do result := result ++ chunk.data + if let some max := maximumSize then + if result.size.toUInt64 > max then + throw (.userError s!"body exceeded maximum size of {max} bytes") - match FromByteArray.fromByteArray (α := α) result with + match FromByteArray.fromByteArray result with | .ok a => return a | .error msg => throw (.userError msg) From 976cc79b0c21746c3056b04a7cd3ce1a96ab9439 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 00:45:38 -0300 Subject: [PATCH 23/44] feat: 100-continue --- src/Std/Internal/Http/Protocol/H1.lean | 43 +++++++++++++++++-- src/Std/Internal/Http/Protocol/H1/Event.lean | 5 +++ src/Std/Internal/Http/Protocol/H1/Reader.lean | 7 ++- 3 files changed, 50 insertions(+), 5 deletions(-) diff --git a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean index cfe9b16afbc6..256dafdde47a 100644 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ b/src/Std/Internal/Http/Protocol/H1.lean @@ -159,6 +159,10 @@ private def checkMessageHead (message : Message.Head dir) : Option Body.Length : message.getSize true +@[inline] +private def hasExpectContinue (message : Message.Head dir) : Bool := + message.headers.hasEntry (.mk "expect") (Header.Value.ofString! 
"100-continue") + -- State Checks /-- @@ -275,13 +279,21 @@ private def processHeaders (machine : Machine dir) : Machine dir := match checkMessageHead machine.reader.messageHead with | none => machine.setFailure .badMessage | some size => - let size := match size with - | .fixed n => .needFixedBody n - | .chunked => .needChunkedSize + let state : Reader.State dir := match size with + | .fixed n => Reader.State.needFixedBody n + | .chunked => Reader.State.needChunkedSize let machine := machine.addEvent (.endHeaders machine.reader.messageHead) - machine.setReaderState size + let waitingContinue : Bool := + match dir with + | .receiving => hasExpectContinue machine.reader.messageHead + | .sending => false + + let nextState : Reader.State dir := if waitingContinue then Reader.State.«continue» state else state + let machine := if waitingContinue then machine.addEvent .continue else machine + + machine.setReaderState nextState |>.setWriterState .waitingHeaders |>.addEvent .needAnswer @@ -391,6 +403,26 @@ def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir else machine +/-- +Allow body processing to continue after receiving `Expect: 100-continue`. +-/ +def canContinue (machine : Machine dir) (status : Status) : Machine dir := + match dir, machine.reader.state with + | .sending, _ => machine + | .receiving, Reader.State.«continue» nextState => + if status == .«continue» then + let machine := machine.modifyWriter (fun writer => { + writer with outputData := Encode.encode (v := .v11) writer.outputData ({ status := .«continue» } : Response.Head) + }) + machine.setReaderState nextState + else + machine.send { status } + |>.userClosedBody + |>.disableKeepAlive + |>.closeReader + |>.setReaderState .closed + | .receiving, _ => machine + /--Send data to the socket. 
-/ @[inline] def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir := @@ -609,6 +641,9 @@ partial def processRead (machine : Machine dir) : Machine dir := else machine + | Reader.State.«continue» _ => + machine + | .complete => if (machine.reader.noMoreInput ∧ machine.reader.input.atEnd) ∨ ¬machine.keepAlive then machine.setReaderState .closed diff --git a/src/Std/Internal/Http/Protocol/H1/Event.lean b/src/Std/Internal/Http/Protocol/H1/Event.lean index 11d2e3895366..6d8d798e29f1 100644 --- a/src/Std/Internal/Http/Protocol/H1/Event.lean +++ b/src/Std/Internal/Http/Protocol/H1/Event.lean @@ -70,4 +70,9 @@ inductive Event (dir : Direction) Indicates readiness to process the next message. -/ | next + + /-- + Indicates that it needs a continue. + -/ + | «continue» deriving Inhabited, Repr diff --git a/src/Std/Internal/Http/Protocol/H1/Reader.lean b/src/Std/Internal/Http/Protocol/H1/Reader.lean index 8c24ff18bc30..e7eb171d1009 100644 --- a/src/Std/Internal/Http/Protocol/H1/Reader.lean +++ b/src/Std/Internal/Http/Protocol/H1/Reader.lean @@ -57,6 +57,11 @@ inductive Reader.State (dir : Direction) : Type -/ | needFixedBody : Nat → State dir + /-- + Paused waiting for a `canContinue` decision, carrying the next state. + -/ + | continue : State dir → State dir + /-- State that it completed a single request or response and can go to the next one -/ @@ -205,7 +210,7 @@ Checks if more input is needed to continue parsing. 
def needsMoreInput (reader : Reader dir) : Bool := reader.input.atEnd && !reader.noMoreInput && match reader.state with - | .complete | .closed | .failed _ => false + | .complete | .closed | .failed _ | .«continue» _ => false | _ => true /-- From 0fd0fa9c730f13d31a02983802a30123013ba823 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 01:54:26 -0300 Subject: [PATCH 24/44] fix: test --- tests/lean/run/async_http_body.lean | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 32e74024727d..976c90c8b4c0 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -85,12 +85,12 @@ def streamForIn : Async Unit := do -- Test chunks preserve extensions def streamExtensions : Async Unit := do let stream ← Stream.empty - let chunk := { data := "hello".toUTF8, extensions := #[("key", some "value")] : Chunk } + let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some "value")] : Chunk } stream.send chunk let result ← stream.recv none assert! result.isSome assert! result.get!.extensions.size == 1 - assert! result.get!.extensions[0]! == ("key", some "value") + assert! result.get!.extensions[0]! 
== (.mk "key", some "value") #eval streamExtensions.block From 4db36b214b51d31c1def015910441930e2e5f4f9 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 02:11:38 -0300 Subject: [PATCH 25/44] feat: improve parser --- src/Std/Internal/Http/Protocol/H1/Parser.lean | 18 +++++++--- src/Std/Internal/Parsec/ByteArray.lean | 35 +++++++++++++++---- 2 files changed, 42 insertions(+), 11 deletions(-) diff --git a/src/Std/Internal/Http/Protocol/H1/Parser.lean b/src/Std/Internal/Http/Protocol/H1/Parser.lean index bde1598488dc..392808f29a63 100644 --- a/src/Std/Internal/Http/Protocol/H1/Parser.lean +++ b/src/Std/Internal/Http/Protocol/H1/Parser.lean @@ -83,13 +83,23 @@ def crlf : Parser Unit := do skipByte '\n'.toUInt8 @[inline] -def rsp (limits : H1.Config) : Parser Unit := +def rsp (limits : H1.Config) : Parser Unit := do discard <| takeWhileUpTo1 (· == ' '.toUInt8) limits.maxSpaceSequence + if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then + fail "invalid space sequence" + else + pure () + @[inline] -def osp (limits : H1.Config) : Parser Unit := +def osp (limits : H1.Config) : Parser Unit := do discard <| takeWhileUpTo (· == ' '.toUInt8) limits.maxSpaceSequence + if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then + fail "invalid space sequence" + else + pure () + @[inline] def uint8 : Parser UInt8 := do let d ← digit @@ -153,10 +163,10 @@ public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do -- field-line = field-name ":" OWS field-value OWS def parseFieldLine (limits : H1.Config) : Parser (String × String) := do let name ← token limits.maxHeaderNameLength - let value ← skipByte ':'.toUInt8 *> osp limits *> takeWhileUpTo1 isFieldVChar limits.maxHeaderValueLength <* osp limits + let value ← skipByte ':'.toUInt8 *> osp limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* osp limits let name ← opt <| String.fromUTF8? name.toByteArray - let value ← opt <| String.fromUTF8? 
value.toByteArray + let value ← opt <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty return (name, value) diff --git a/src/Std/Internal/Parsec/ByteArray.lean b/src/Std/Internal/Parsec/ByteArray.lean index 787191384fbb..c39110ae1c01 100644 --- a/src/Std/Internal/Parsec/ByteArray.lean +++ b/src/Std/Internal/Parsec/ByteArray.lean @@ -68,7 +68,7 @@ def skipBytes (arr : ByteArray) : Parser Unit := fun it => else .error it (.other s!"expected byte {want}, got {got}") else - .error it (.other s!"unexpected end of input while matching {arr.size} bytes") + .error it .eof else .success it () go 0 it @@ -218,7 +218,11 @@ partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice := else (count, iter) let (length, newIt) := findEnd 0 it - .success newIt (it.array[it.idx...(it.idx + length)]) + + if newIt.atEnd then + .error newIt .eof + else + .success newIt (it.array[it.idx...(it.idx + length)]) /-- Parses until a predicate is satisfied (exclusive). @@ -238,7 +242,12 @@ partial def skipWhile (pred : UInt8 → Bool) : Parser Unit := else if pred iter.curr then findEnd (count + 1) iter.next else iter - .success (findEnd 0 it) () + let newIt := findEnd 0 it + + if newIt.atEnd then + .error newIt .eof + else + .success (findEnd 0 it) () /-- Skips until a predicate is satisfied. @@ -260,7 +269,11 @@ partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSli else (count, iter) let (length, newIt) := findEnd 0 it - .success newIt (it.array[it.idx...(it.idx + length)]) + + if newIt.atEnd then + .error newIt .eof + else + .success newIt (it.array[it.idx...(it.idx + length)]) /-- Parses while a predicate is satisfied, up to a given limit, requiring at least one byte. 
@@ -275,8 +288,11 @@ def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice := else (count, iter) let (length, newIt) := findEnd 0 it - if length = 0 then - .error it (if newIt.atEnd then .eof else .other "expected at least one char") + + if newIt.atEnd then + .error newIt <| .eof + else if length = 0 then + .error newIt <| .other "expected at least one char" else .success newIt (it.array[it.idx...(it.idx + length)]) @@ -299,7 +315,12 @@ partial def skipWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser Unit := else if pred iter.curr then findEnd (count + 1) iter.next else iter - .success (findEnd 0 it) () + let newIt := findEnd 0 it + + if newIt.atEnd then + .error newIt .eof + else + .success newIt () /-- Skips until a predicate is satisfied, up to a given limit. From 4f20a815ec6a85d940cbbc66d9ef4062956efd68 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 02:18:09 -0300 Subject: [PATCH 26/44] fix: extension name --- src/Std/Internal/Http/Protocol/H1/Event.lean | 2 +- src/Std/Internal/Http/Protocol/H1/Parser.lean | 10 +++++++--- src/Std/Internal/Http/Protocol/H1/Reader.lean | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/Std/Internal/Http/Protocol/H1/Event.lean b/src/Std/Internal/Http/Protocol/H1/Event.lean index 6d8d798e29f1..91dc11537ad2 100644 --- a/src/Std/Internal/Http/Protocol/H1/Event.lean +++ b/src/Std/Internal/Http/Protocol/H1/Event.lean @@ -39,7 +39,7 @@ inductive Event (dir : Direction) /-- Carries a chunk of message body data. -/ - | gotData (final : Bool) (ext : Array (String × Option String)) (data : ByteSlice) + | gotData (final : Bool) (ext : Array (ExtensionName × Option String)) (data : ByteSlice) /-- Signals that additional input data is required to continue processing. 
diff --git a/src/Std/Internal/Http/Protocol/H1/Parser.lean b/src/Std/Internal/Http/Protocol/H1/Parser.lean index 392808f29a63..4719d58c9f03 100644 --- a/src/Std/Internal/Http/Protocol/H1/Parser.lean +++ b/src/Std/Internal/Http/Protocol/H1/Parser.lean @@ -215,13 +215,17 @@ partial def parseQuotedString : Parser String := do opt <| String.fromUTF8? (← loop .empty) -- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] ) -def parseChunkExt (limits : H1.Config) : Parser (String × Option String) := do +def parseChunkExt (limits : H1.Config) : Parser (ExtensionName × Option String) := do osp limits *> skipByte ';'.toUInt8 *> osp limits let name ← (opt =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtNameLength) <* osp limits + let some name := ExtensionName.ofString? name + | fail "invalid extension name" + if (← peekWhen? (· == '='.toUInt8)) |>.isSome then osp limits *> skipByte '='.toUInt8 *> osp limits let value ← osp limits *> (parseQuotedString <|> opt =<< (String.fromUTF8? 
<$> ByteSlice.toByteArray <$> token limits.maxChunkExtValueLength)) + return (name, some value) return (name, none) @@ -229,7 +233,7 @@ def parseChunkExt (limits : H1.Config) : Parser (String × Option String) := do /-- This function parses the size and extension of a chunk -/ -public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (String × Option String)) := do +public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (ExtensionName × Option String)) := do let size ← hex let ext ← many (parseChunkExt limits) crlf @@ -245,7 +249,7 @@ public inductive TakeResult /-- This function parses a single chunk in chunked transfer encoding -/ -public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (String × Option String) × ByteSlice)) := do +public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (ExtensionName × Option String) × ByteSlice)) := do let (size, ext) ← parseChunkSize limits if size == 0 then return none diff --git a/src/Std/Internal/Http/Protocol/H1/Reader.lean b/src/Std/Internal/Http/Protocol/H1/Reader.lean index e7eb171d1009..200c044b2208 100644 --- a/src/Std/Internal/Http/Protocol/H1/Reader.lean +++ b/src/Std/Internal/Http/Protocol/H1/Reader.lean @@ -50,7 +50,7 @@ inductive Reader.State (dir : Direction) : Type /-- State waiting for chunk body data of specified size. -/ - | needChunkedBody : Array (String × Option String) → Nat → State dir + | needChunkedBody : Array (ExtensionName × Option String) → Nat → State dir /-- State waiting for fixed-length body data of specified size. 
From 4a641fc49811d63aa394f04571e38cfb1c962fef Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 02:22:43 -0300 Subject: [PATCH 27/44] revert: bytearray parser --- src/Std/Internal/Parsec/ByteArray.lean | 35 ++++++-------------------- 1 file changed, 7 insertions(+), 28 deletions(-) diff --git a/src/Std/Internal/Parsec/ByteArray.lean b/src/Std/Internal/Parsec/ByteArray.lean index c39110ae1c01..787191384fbb 100644 --- a/src/Std/Internal/Parsec/ByteArray.lean +++ b/src/Std/Internal/Parsec/ByteArray.lean @@ -68,7 +68,7 @@ def skipBytes (arr : ByteArray) : Parser Unit := fun it => else .error it (.other s!"expected byte {want}, got {got}") else - .error it .eof + .error it (.other s!"unexpected end of input while matching {arr.size} bytes") else .success it () go 0 it @@ -218,11 +218,7 @@ partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice := else (count, iter) let (length, newIt) := findEnd 0 it - - if newIt.atEnd then - .error newIt .eof - else - .success newIt (it.array[it.idx...(it.idx + length)]) + .success newIt (it.array[it.idx...(it.idx + length)]) /-- Parses until a predicate is satisfied (exclusive). @@ -242,12 +238,7 @@ partial def skipWhile (pred : UInt8 → Bool) : Parser Unit := else if pred iter.curr then findEnd (count + 1) iter.next else iter - let newIt := findEnd 0 it - - if newIt.atEnd then - .error newIt .eof - else - .success (findEnd 0 it) () + .success (findEnd 0 it) () /-- Skips until a predicate is satisfied. @@ -269,11 +260,7 @@ partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSli else (count, iter) let (length, newIt) := findEnd 0 it - - if newIt.atEnd then - .error newIt .eof - else - .success newIt (it.array[it.idx...(it.idx + length)]) + .success newIt (it.array[it.idx...(it.idx + length)]) /-- Parses while a predicate is satisfied, up to a given limit, requiring at least one byte. 
@@ -288,11 +275,8 @@ def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice := else (count, iter) let (length, newIt) := findEnd 0 it - - if newIt.atEnd then - .error newIt <| .eof - else if length = 0 then - .error newIt <| .other "expected at least one char" + if length = 0 then + .error it (if newIt.atEnd then .eof else .other "expected at least one char") else .success newIt (it.array[it.idx...(it.idx + length)]) @@ -315,12 +299,7 @@ partial def skipWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser Unit := else if pred iter.curr then findEnd (count + 1) iter.next else iter - let newIt := findEnd 0 it - - if newIt.atEnd then - .error newIt .eof - else - .success newIt () + .success (findEnd 0 it) () /-- Skips until a predicate is satisfied, up to a given limit. From bf2ed2c87adf8a8cc9563d8ecde269e113924c9f Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 10:20:35 -0300 Subject: [PATCH 28/44] revert: h1 --- src/Std/Internal/Http/Protocol/H1.lean | 668 ------------------ src/Std/Internal/Http/Protocol/H1/Config.lean | 97 --- src/Std/Internal/Http/Protocol/H1/Error.lean | 98 --- src/Std/Internal/Http/Protocol/H1/Event.lean | 78 -- .../Internal/Http/Protocol/H1/Message.lean | 115 --- src/Std/Internal/Http/Protocol/H1/Parser.lean | 328 --------- src/Std/Internal/Http/Protocol/H1/Reader.lean | 275 ------- src/Std/Internal/Http/Protocol/H1/Writer.lean | 265 ------- 8 files changed, 1924 deletions(-) delete mode 100644 src/Std/Internal/Http/Protocol/H1.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Config.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Error.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Event.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Message.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Parser.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Reader.lean delete mode 100644 src/Std/Internal/Http/Protocol/H1/Writer.lean diff --git 
a/src/Std/Internal/Http/Protocol/H1.lean b/src/Std/Internal/Http/Protocol/H1.lean deleted file mode 100644 index 256dafdde47a..000000000000 --- a/src/Std/Internal/Http/Protocol/H1.lean +++ /dev/null @@ -1,668 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Time -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal -public import Std.Internal.Http.Protocol.H1.Parser -public import Std.Internal.Http.Protocol.H1.Config -public import Std.Internal.Http.Protocol.H1.Message -public import Std.Internal.Http.Protocol.H1.Reader -public import Std.Internal.Http.Protocol.H1.Writer -public import Std.Internal.Http.Protocol.H1.Event - -public section - -/-! -# HTTP/1.1 Protocol State Machine - -This module implements the core HTTP/1.1 protocol state machine that handles -parsing requests/responses and generating output. The machine is direction-aware, -supporting both server mode (receiving requests) and client mode (receiving responses). --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -open Std Internal Parsec ByteArray -open Internal - -/-- -Results from a single step of the state machine. --/ -structure StepResult (dir : Direction) where - - /-- - Events that occurred during this step (e.g., headers received, data available, errors). - -/ - events : Array (Event dir) := #[] - - /-- - Output data ready to be sent to the socket. - -/ - output : ChunkedBuffer := .empty - -/-- -The HTTP 1.1 protocol state machine. --/ -structure Machine (dir : Direction) where - - /-- - The state of the reader. - -/ - reader : Reader dir := {} - - /-- - The state of the writer. - -/ - writer : Writer dir := {} - - /-- - The configuration. - -/ - config : Config - - /-- - Events that happened during reading and writing. 
- -/ - events : Array (Event dir) := #[] - - /-- - Error thrown by the machine. - -/ - error : Option Error := none - - /-- - The timestamp for the `Date` header. - -/ - instant : Option (Std.Time.DateTime .UTC) := none - - /-- - If the connection will be kept alive after the message. - -/ - keepAlive : Bool := config.enableKeepAlive - - /-- - Whether a forced flush has been requested by the user. - -/ - forcedFlush : Bool := false - - /-- - Host header. - -/ - host : Option Header.Value := none - -namespace Machine - -@[inline] -private def modifyWriter (machine : Machine dir) (fn : Writer dir → Writer dir) : Machine dir := - { machine with writer := fn machine.writer } - -@[inline] -private def modifyReader (machine : Machine dir) (fn : Reader dir → Reader dir) : Machine dir := - { machine with reader := fn machine.reader } - -@[inline] -private def setReaderState (machine : Machine dir) (state : Reader.State dir) : Machine dir := - machine.modifyReader ({ · with state }) - -@[inline] -private def setWriterState (machine : Machine dir) (state : Writer.State) : Machine dir := - machine.modifyWriter ({ · with state }) - -@[inline] -private def addEvent (machine : Machine dir) (event : Event dir) : Machine dir := - { machine with events := machine.events.push event } - -@[inline] -private def setEvent (machine : Machine dir) (event : Option (Event dir)) : Machine dir := - match event with - | some event => machine.addEvent event - | none => machine - -@[inline] -private def setError (machine : Machine dir) (error : Error) : Machine dir := - { machine with error := some error } - -@[inline] -private def disableKeepAlive (machine : Machine dir) : Machine dir := - { machine with keepAlive := false } - -@[inline] -private def setFailure (machine : Machine dir) (error : H1.Error) : Machine dir := - machine - |>.addEvent (.failed error) - |>.setReaderState (.failed error) - |>.setError error - -@[inline] -private def updateKeepAlive (machine : Machine dir) (should : Bool) 
: Machine dir := - { machine with keepAlive := machine.keepAlive ∧ should } - - -private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do - match dir with - | .receiving => do - let headers ← message.headers.getAll? Header.Name.host - guard (headers.size = 1) - | .sending => pure () - - if let .receiving := dir then - if message.method == .head ∨ message.method == .connect then - return .fixed 0 - - message.getSize true - -@[inline] -private def hasExpectContinue (message : Message.Head dir) : Bool := - message.headers.hasEntry (.mk "expect") (Header.Value.ofString! "100-continue") - --- State Checks - -/-- -Returns `true` if the reader is in a failed state. --/ -@[inline] -def failed (machine : Machine dir) : Bool := - match machine.reader.state with - | .failed _ => true - | _ => false - -/-- -Returns `true` if the reader has completed successfully. --/ -@[inline] -def isReaderComplete (machine : Machine dir) : Bool := - match machine.reader.state with - | .complete => true - | _ => false - -/-- -Returns `true` if the reader is closed. --/ -@[inline] -def isReaderClosed (machine : Machine dir) : Bool := - match machine.reader.state with - | .closed => true - | _ => false - -/-- -Returns `true` if the machine should flush buffered output. --/ -@[inline] -def shouldFlush (machine : Machine dir) : Bool := - machine.failed ∨ - machine.reader.state == .closed ∨ - machine.writer.isReadyToSend ∨ - machine.writer.knownSize.isSome - -/-- -Returns `true` if the writer is waiting for headers of a new message. --/ -@[inline] -def isWaitingMessage (machine : Machine dir) : Bool := - machine.writer.state == .waitingHeaders ∧ - ¬machine.writer.sentMessage - -/-- -Returns `true` if both reader and writer are closed and no output remains. 
--/ -@[inline] -def halted (machine : Machine dir) : Bool := - match machine.reader.state, machine.writer.state with - | .closed, .closed => machine.writer.outputData.isEmpty - | _, _ => false - -private def parseWith (machine : Machine dir) (parser : Parser α) (limit : Option Nat) - (expect : Option Nat := none) : Machine dir × Option α := - let remaining := machine.reader.input.remainingBytes - match parser machine.reader.input with - | .success buffer result => - ({ machine with reader := machine.reader.setInput buffer }, some result) - | .error it .eof => - let usedBytesUntilFailure := remaining - it.remainingBytes - if machine.reader.noMoreInput then - (machine.setFailure .connectionClosed, none) - else if let some limit := limit then - if usedBytesUntilFailure ≥ limit - then (machine.setFailure .badMessage, none) - else (machine.addEvent (.needMoreData expect), none) - else - (machine.addEvent (.needMoreData expect), none) - | .error _ _ => - (machine.setFailure .badMessage, none) - --- Message Processing - -private def resetForNextMessage (machine : Machine ty) : Machine ty := - - if machine.keepAlive then - { machine with - reader := { - state := .needStartLine, - input := machine.reader.input, - messageHead := {}, - messageCount := machine.reader.messageCount + 1 - }, - writer := { - userData := .empty, - outputData := machine.writer.outputData, - state := .pending, - knownSize := none, - messageHead := {}, - userClosedBody := false, - sentMessage := false - }, - events := machine.events.push .next, - error := none - } - else - machine.addEvent .close - |>.setWriterState .closed - |>.setReaderState .closed - -/- -This function processes the message we are receiving --/ -private def processHeaders (machine : Machine dir) : Machine dir := - let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages) - - let shouldKeepAlive : Bool := machine.reader.messageHead.shouldKeepAlive - let machine := updateKeepAlive machine 
shouldKeepAlive - - match checkMessageHead machine.reader.messageHead with - | none => machine.setFailure .badMessage - | some size => - let state : Reader.State dir := match size with - | .fixed n => Reader.State.needFixedBody n - | .chunked => Reader.State.needChunkedSize - - let machine := machine.addEvent (.endHeaders machine.reader.messageHead) - - let waitingContinue : Bool := - match dir with - | .receiving => hasExpectContinue machine.reader.messageHead - | .sending => false - - let nextState : Reader.State dir := if waitingContinue then Reader.State.«continue» state else state - let machine := if waitingContinue then machine.addEvent .continue else machine - - machine.setReaderState nextState - |>.setWriterState .waitingHeaders - |>.addEvent .needAnswer - -/-- -This processes the message we are sending. --/ -def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : Machine dir := - let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages) - - let shouldKeepAlive := messageHead.shouldKeepAlive - let machine := machine.updateKeepAlive shouldKeepAlive - let size := Writer.determineTransferMode machine.writer - - let headers := - if messageHead.headers.contains Header.Name.host then - messageHead.headers - else if let some host := machine.host then - messageHead.headers.insert Header.Name.host host - else - messageHead.headers - - -- Add identity header based on direction - let headers := - let identityOpt := machine.config.identityHeader - match dir, identityOpt with - | .receiving, some server => headers.insert Header.Name.server server - | .sending, some userAgent => headers.insert Header.Name.userAgent userAgent - | _, none => headers - - -- Add Connection: close if needed - let headers := - if !machine.keepAlive ∧ !headers.hasEntry Header.Name.connection Header.Value.close then - headers.insert Header.Name.connection Header.Value.close - else - headers - - -- Add Content-Length or 
Transfer-Encoding if needed - let headers := - if !(headers.contains Header.Name.contentLength ∨ headers.contains Header.Name.transferEncoding) then - match size with - | .fixed n => headers.insert Header.Name.contentLength (.ofString! <| toString n) - | .chunked => headers.insert Header.Name.transferEncoding Header.Value.chunked - else - headers - - let state := Writer.State.writingBody size - - machine.modifyWriter (fun writer => { - writer with - - outputData := - match dir, messageHead with - | .receiving, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers } - | .sending, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers }, - - state - }) - -/--Put some data inside the input of the machine. -/ -@[inline] -def feed (machine : Machine ty) (data : ByteArray) : Machine ty := - if machine.isReaderClosed then - machine - else - { machine with reader := machine.reader.feed data } - -/--Signal that reader is not going to receive any more messages. -/ -@[inline] -def closeReader (machine : Machine dir) : Machine dir := - machine.modifyReader ({ · with noMoreInput := true }) - -/--Signal that the writer cannot send more messages because the socket closed. -/ -@[inline] -def closeWriter (machine : Machine dir) : Machine dir := - machine.modifyWriter ({ · with state := .closed, userClosedBody := true }) - -/--Signal that the user is not sending data anymore. -/ -@[inline] -def userClosedBody (machine : Machine dir) : Machine dir := - machine.modifyWriter ({ · with userClosedBody := true }) - -/--Signal that the socket is not sending data anymore. -/ -@[inline] -def noMoreInput (machine : Machine dir) : Machine dir := - machine.modifyReader ({ · with noMoreInput := true }) - -/--Set a known size for the message body. 
-/ -@[inline] -def setKnownSize (machine : Machine dir) (size : Body.Length) : Machine dir := - machine.modifyWriter (fun w => { w with knownSize := w.knownSize.or (some size) }) - -/--Send the head of a message to the machine. -/ -@[inline] -def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir := - if machine.isWaitingMessage then - let machine := machine.modifyWriter ({ · with messageHead := message, sentMessage := true }) - - let machine := - if machine.writer.knownSize.isNone then - match message.getSize false with - | some size => machine.setKnownSize size - | none => machine - else - machine - - machine.setWriterState .waitingForFlush - else - machine - -/-- -Allow body processing to continue after receiving `Expect: 100-continue`. --/ -def canContinue (machine : Machine dir) (status : Status) : Machine dir := - match dir, machine.reader.state with - | .sending, _ => machine - | .receiving, Reader.State.«continue» nextState => - if status == .«continue» then - let machine := machine.modifyWriter (fun writer => { - writer with outputData := Encode.encode (v := .v11) writer.outputData ({ status := .«continue» } : Response.Head) - }) - machine.setReaderState nextState - else - machine.send { status } - |>.userClosedBody - |>.disableKeepAlive - |>.closeReader - |>.setReaderState .closed - | .receiving, _ => machine - -/--Send data to the socket. -/ -@[inline] -def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir := - if data.isEmpty then - machine - else - machine.modifyWriter (fun writer => { writer with userData := writer.userData ++ data }) - -/--Get all the events of the machine. -/ -@[inline] -def takeEvents (machine : Machine dir) : Machine dir × Array (Event dir) := - ({ machine with events := #[] }, machine.events) - -/--Take all the accumulated output to send to the socket. 
-/ -@[inline] -def takeOutput (machine : Machine dir) : Machine dir × ChunkedBuffer := - let output := machine.writer.outputData - ({ machine with writer := { machine.writer with outputData := .empty } }, output) - -/--Process the writer part of the machine. -/ -partial def processWrite (machine : Machine dir) : Machine dir := - match machine.writer.state with - | .pending => - if machine.reader.isClosed then - machine.closeWriter - else - machine - | .waitingHeaders => - machine.addEvent .needAnswer - | .waitingForFlush => - if machine.shouldFlush then - machine.setHeaders machine.writer.messageHead - |> processWrite - else - machine - - | .writingHeaders => - machine.setWriterState (.writingBody (Writer.determineTransferMode machine.writer)) - |> processWrite - - | .writingBody (.fixed n) => - if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then - let (writer, remaining) := Writer.writeFixedBody machine.writer n - let machine := { machine with writer } - - if machine.writer.isReadyToSend ∨ remaining = 0 then - machine.setWriterState .complete |> processWrite - else - machine.setWriterState (.writingBody (.fixed remaining)) - else - machine - - | .writingBody .chunked => - if machine.writer.userClosedBody then - machine.modifyWriter Writer.writeFinalChunk - |>.setWriterState .complete - |> processWrite - else if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then - machine.modifyWriter Writer.writeChunkedBody - |> processWrite - else - machine - - | .shuttingDown => - if machine.writer.outputData.isEmpty then - machine.setWriterState .complete |> processWrite - else - machine - - | .complete => - if machine.isReaderComplete then - if machine.keepAlive then - resetForNextMessage machine - else - machine.setWriterState .closed - |>.addEvent .close - else if machine.isReaderClosed then - machine.setWriterState .closed - |>.addEvent .close - else - if machine.keepAlive then - machine - else - machine.setWriterState .closed - - | 
.closed => - machine - -/--Handle the failed state for the reader. -/ -private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Machine dir := - let machine : Machine dir := - match dir with - | .receiving => - if ¬machine.writer.sentMessage ∧ ¬machine.writer.isClosed then - machine - |>.setWriterState .waitingHeaders - |>.disableKeepAlive - |>.send { status := .badRequest } |>.userClosedBody - else - machine - | .sending => machine - - machine - |>.setReaderState .closed - |>.addEvent (.failed error) - |>.setError error - -/--Process the reader part of the machine. -/ -partial def processRead (machine : Machine dir) : Machine dir := - match machine.reader.state with - | .needStartLine => - if machine.reader.noMoreInput ∧ machine.reader.input.atEnd then - machine.setReaderState .closed - else if machine.reader.input.atEnd then - machine.addEvent (.needMoreData none) - else - let (machine, result) : Machine dir × Option (Message.Head dir) := - match dir with - | .receiving => parseWith machine (parseRequestLine machine.config) (limit := some 8192) - | .sending => parseWith machine (parseStatusLine machine.config) (limit := some 8192) - - if let some head := result then - if head.version != .v11 then - machine.setFailure .unsupportedVersion - else - machine - |>.modifyReader (.setMessageHead head) - |>.setReaderState (.needHeader 0) - |> processRead - else - machine - - | .needHeader headerCount => - let (machine, result) := parseWith machine - (parseSingleHeader machine.config) (limit := none) - - if headerCount > machine.config.maxHeaders then - machine |>.setFailure .badMessage - else - if let some result := result then - if let some (name, value) := result then - if let some (name, headerValue) := Prod.mk <$> Header.Name.ofString? name <*> Header.Value.ofString? 
value then - machine - |>.modifyReader (.addHeader name headerValue) - |>.setReaderState (.needHeader (headerCount + 1)) - |> processRead - else - machine.setFailure .badMessage - else - processHeaders machine - |> processRead - else - machine - - | .needChunkedSize => - let (machine, result) := parseWith machine (parseChunkSize machine.config) (limit := some 128) - - match result with - | some (size, ext) => - machine - |>.setReaderState (.needChunkedBody ext size) - |> processRead - | none => - machine - - | .needChunkedBody ext 0 => - let (machine, result) := parseWith machine (parseLastChunkBody machine.config) (limit := some 2) - - match result with - | some _ => - machine - |>.setReaderState .complete - |>.addEvent (.gotData true ext .empty) - |> processRead - | none => - machine - - | .needChunkedBody ext size => - let (machine, result) := parseWith machine - (parseChunkedSizedData size) (limit := none) (some size) - - if let some body := result then - match body with - | .complete body => - machine - |>.setReaderState .needChunkedSize - |>.addEvent (.gotData false ext body) - |> processRead - | .incomplete body remaining => - machine - |>.setReaderState (.needChunkedBody ext remaining) - |>.addEvent (.gotData false ext body) - else - machine - - | .needFixedBody 0 => - machine - |>.setReaderState .complete - |>.addEvent (.gotData true #[] .empty) - |> processRead - - | .needFixedBody size => - let (machine, result) := parseWith machine (parseFixedSizeData size) (limit := none) (some size) - - if let some body := result then - match body with - | .complete body => - machine - |>.setReaderState .complete - |>.addEvent (.gotData true #[] body) - |> processRead - | .incomplete body remaining => - machine - |>.setReaderState (.needFixedBody remaining) - |>.addEvent (.gotData false #[] body) - else - machine - - | Reader.State.«continue» _ => - machine - - | .complete => - if (machine.reader.noMoreInput ∧ machine.reader.input.atEnd) ∨ ¬machine.keepAlive then - 
machine.setReaderState .closed - else - machine - - | .closed => - machine - - | .failed error => - handleReaderFailed machine error - -/-- -Execute one step of the state machine. --/ -def step (machine : Machine dir) : Machine dir × StepResult dir := - let machine := machine.processRead.processWrite - let (machine, events) := machine.takeEvents - let (machine, output) := machine.takeOutput - (machine, { events, output }) - -end Std.Http.Protocol.H1.Machine diff --git a/src/Std/Internal/Http/Protocol/H1/Config.lean b/src/Std/Internal/Http/Protocol/H1/Config.lean deleted file mode 100644 index 11fa9f178e12..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Config.lean +++ /dev/null @@ -1,97 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal - -public section - -/-! -# HTTP/1.1 Configuration - -This module defines the configuration options for HTTP/1.1 protocol processing, -including connection limits, header constraints, and various size limits. --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -open Std Internal Parsec ByteArray -open Internal - -/-- -Connection limits configuration with validation. --/ -structure Config where - /-- - Maximum number of messages per connection. - -/ - maxMessages : Nat := 100 - - /-- - Maximum number of headers allowed per message. - -/ - maxHeaders : Nat := 100 - - /-- - Whether to enable keep-alive connections by default. 
- -/ - enableKeepAlive : Bool := true - - /-- - The server name (for sending responses) or user agent (for sending requests) - -/ - identityHeader : Option Header.Value := some (.mk "LeanServer") - - /-- - Maximum length of HTTP method token (default: 16 bytes) - -/ - maxMethodLength : Nat := 16 - - /-- - Maximum length of request URI (default: 8192 bytes) - -/ - maxUriLength : Nat := 8192 - - /-- - Maximum length of header field name (default: 256 bytes) - -/ - maxHeaderNameLength : Nat := 256 - - /-- - Maximum length of header field value (default: 8192 bytes) - -/ - maxHeaderValueLength : Nat := 8192 - - /-- - Maximum number of spaces in delimiter sequences (default: 256) - -/ - maxSpaceSequence : Nat := 256 - - /-- - Maximum length of chunk extension name (default: 256 bytes) - -/ - maxChunkExtNameLength : Nat := 256 - - /-- - Maximum length of chunk extension value (default: 256 bytes) - -/ - maxChunkExtValueLength : Nat := 256 - - /-- - Maximum length of reason phrase (default: 512 bytes) - -/ - maxReasonPhraseLength : Nat := 512 - - /-- - Maximum number of trailer headers (default: 100) - -/ - maxTrailerHeaders : Nat := 100 - -end Std.Http.Protocol.H1 diff --git a/src/Std/Internal/Http/Protocol/H1/Error.lean b/src/Std/Internal/Http/Protocol/H1/Error.lean deleted file mode 100644 index e02e7ee36b11..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Error.lean +++ /dev/null @@ -1,98 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Time -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal -public import Std.Internal.Http.Protocol.H1.Parser -public import Std.Internal.Http.Protocol.H1.Config -public import Std.Internal.Http.Protocol.H1.Message - -public section - -/-! 
-# HTTP/1.1 Errors - -This module defines the error types for HTTP/1.1 protocol processing, -including parsing errors, timeout errors, and connection errors. --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -/-- -Specific HTTP processing errors with detailed information. --/ -inductive Error - /-- - Malformed request line or status line. - -/ - | invalidStatusLine - - /-- - Invalid or malformed header. - -/ - | invalidHeader - - /-- - Request timeout occurred. - -/ - | timeout - - /-- - Request entity too large. - -/ - | entityTooLarge - - /-- - Unsupported HTTP method. - -/ - | unsupportedMethod - - /-- - Unsupported HTTP version. - -/ - | unsupportedVersion - - /-- - Invalid chunk encoding. - -/ - | invalidChunk - - /-- - Connection Closed - -/ - | connectionClosed - - /-- - Bad request/response - -/ - | badMessage - - /-- - Generic error with message. - -/ - | other (message : String) -deriving Repr, BEq - -instance : ToString Error where - toString - | .invalidStatusLine => "Invalid status line" - | .invalidHeader => "Invalid header" - | .timeout => "Timeout" - | .entityTooLarge => "Entity too large" - | .unsupportedMethod => "Unsupported method" - | .unsupportedVersion => "Unsupported version" - | .invalidChunk => "Invalid chunk" - | .connectionClosed => "Connection closed" - | .badMessage => "Bad message" - | .other msg => s!"Other error: {msg}" - -instance : Repr ByteSlice where - reprPrec x := reprPrec x.toByteArray.data diff --git a/src/Std/Internal/Http/Protocol/H1/Event.lean b/src/Std/Internal/Http/Protocol/H1/Event.lean deleted file mode 100644 index 91dc11537ad2..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Event.lean +++ /dev/null @@ -1,78 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. 
-Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Time -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal -public import Std.Internal.Http.Protocol.H1.Parser -public import Std.Internal.Http.Protocol.H1.Config -public import Std.Internal.Http.Protocol.H1.Message -public import Std.Internal.Http.Protocol.H1.Error - -public section - -/-! -# HTTP/1.1 Events - -This module defines the events that can occur during HTTP/1.1 message processing, -including header completion, data arrival, and error conditions. --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -/-- -Events emitted during HTTP message processing. --/ -inductive Event (dir : Direction) - /-- - Indicates that all headers have been successfully parsed. - -/ - | endHeaders (head : Message.Head dir) - - /-- - Carries a chunk of message body data. - -/ - | gotData (final : Bool) (ext : Array (ExtensionName × Option String)) (data : ByteSlice) - - /-- - Signals that additional input data is required to continue processing. - -/ - | needMoreData (size : Option Nat) - - /-- - Indicates a failure during parsing or processing. - -/ - | failed (err : Error) - - /-- - Requests that the connection be closed. - -/ - | close - - /-- - Indicates that a response is required. - -/ - | needAnswer - - /-- - Indicates that a message body is required. - -/ - | needBody - - /-- - Indicates readiness to process the next message. - -/ - | next - - /-- - Indicates that it needs a continue. - -/ - | «continue» -deriving Inhabited, Repr diff --git a/src/Std/Internal/Http/Protocol/H1/Message.lean b/src/Std/Internal/Http/Protocol/H1/Message.lean deleted file mode 100644 index ebd96b71609a..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Message.lean +++ /dev/null @@ -1,115 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. 
-Authors: Sofia Rodrigues --/ -module - -prelude -import Init.Data.Array -public import Std.Internal.Http.Data - -public section - -/-! -# Message - -This module provides types and operations for HTTP/1.1 messages, centered around the `Direction` -type which models the server's role in message exchange: `Direction.receiving` for parsing incoming -requests from clients, and `Direction.sending` for generating outgoing responses to clients. -The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or -`Response.Head` accordingly, enabling generic code that works uniformly across both phases -while exposing common operations such as headers, version, and `shouldKeepAlive` --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -/-- -Direction of message flow from the server's perspective. --/ -inductive Direction - /-- - Receiving and parsing incoming requests from clients. - -/ - | receiving - - /-- - Generating and sending outgoing responses to clients. - -/ - | sending -deriving BEq - -/-- -Inverts the direction of the requests. --/ -@[expose] -abbrev Direction.swap : Direction → Direction - | .receiving => .sending - | .sending => .receiving - -/-- -Gets the message head type based on direction. --/ -@[expose] -def Message.Head : Direction → Type - | .receiving => Request.Head - | .sending => Response.Head - -/-- -Gets the headers of a `Message`. --/ -def Message.Head.headers (m : Message.Head dir) : Headers := - match dir with - | .receiving => Request.Head.headers m - | .sending => Response.Head.headers m - -/-- -Gets the version of a `Message`. --/ -def Message.Head.version (m : Message.Head dir) : Version := - match dir with - | .receiving => Request.Head.version m - | .sending => Response.Head.version m - -private def isChunked (message : Message.Head dir) : Option Bool := - match message.headers.get? 
.transferEncoding with - | none => some false - | some v => Header.TransferEncoding.parse v |>.map (·.isChunked) - -/-- -Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag. --/ -def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length := - match (message.headers.getAll? .contentLength, isChunked message) with - | (some #[cl], some false) => .fixed <$> cl.value.toNat? - | (none, some false) => if allowEOFBody then some (.fixed 0) else none - | (none, some true) => some .chunked - | (some _, some _) => none -- To avoid request smuggling with multiple content-length headers. - | (_, none) => none -- Error validating the chunked encoding - - -/-- -Checks whether the message indicates that the connection should be kept alive. --/ -@[inline] -def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool := - ¬message.headers.hasEntry .connection (.mk "close") - ∧ message.version = .v11 - -instance : Repr (Message.Head dir) := - match dir with - | .receiving => inferInstanceAs (Repr Request.Head) - | .sending => inferInstanceAs (Repr Response.Head) - -instance : Internal.Encode .v11 (Message.Head dir) := - match dir with - | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head) - | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head) - -instance : EmptyCollection (Message.Head dir) where - emptyCollection := - match dir with - | .receiving => {} - | .sending => {} diff --git a/src/Std/Internal/Http/Protocol/H1/Parser.lean b/src/Std/Internal/Http/Protocol/H1/Parser.lean deleted file mode 100644 index 4719d58c9f03..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Parser.lean +++ /dev/null @@ -1,328 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. 
-Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Internal.Parsec -public import Std.Internal.Http.Data -public import Std.Internal.Parsec.ByteArray -public import Std.Internal.Http.Protocol.H1.Config - -/-! -This module defines a parser for HTTP/1.1 requests. The reference used is https://httpwg.org/specs/rfc9112.html. --/ - -namespace Std.Http.Protocol.H1 - -open Std Internal Parsec ByteArray Internal - -set_option linter.all true - -@[inline] -def isDigit (c : UInt8) : Bool := - c ≥ '0'.toUInt8 ∧ c ≤ '9'.toUInt8 - -@[inline] -def isAlpha (c : UInt8) : Bool := - (c ≥ 'a'.toUInt8 ∧ c ≤ 'z'.toUInt8) ∨ (c ≥ 'A'.toUInt8 ∧ c ≤ 'Z'.toUInt8) - -@[inline] -def isVChar (c : UInt8) : Bool := - c ≥ 0x21 ∧ c ≤ 0x7E - -def isTokenCharacter (c : UInt8) : Bool := - isDigit c ∨ isAlpha c ∨ c == '!'.toUInt8 ∨ c == '#'.toUInt8 ∨ c == '$'.toUInt8 ∨ c == '%'.toUInt8 ∨ - c == '&'.toUInt8 ∨ c == '\''.toUInt8 ∨ c == '*'.toUInt8 ∨ c == '+'.toUInt8 ∨ c == '-'.toUInt8 ∨ - c == '.'.toUInt8 ∨ c == '^'.toUInt8 ∨ c == '_'.toUInt8 ∨ c == '`'.toUInt8 ∨ c == '|'.toUInt8 ∨ - c == '~'.toUInt8 - -@[inline] -def isObsChar (c : UInt8) : Bool := - c ≥ 0x80 ∧ c ≤ 0xFF - -@[inline] -def isFieldVChar (c : UInt8) : Bool := - isVChar c ∨ isObsChar c ∨ c = ' '.toUInt8 ∨ c = '\t'.toUInt8 - --- HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text -@[inline] -def isQdText (c : UInt8) : Bool := - c == '\t'.toUInt8 ∨ - c == ' '.toUInt8 ∨ - c == '!'.toUInt8 ∨ - (c ≥ '#'.toUInt8 ∧ c ≤ '['.toUInt8) ∨ - (c ≥ ']'.toUInt8 ∧ c ≤ '~'.toUInt8) ∨ - isObsChar c - --- Parser blocks - -def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do - let items ← many (attempt <| parser.bind (fun item => match item with - | some x => return x - | none => fail "end of items")) - if items.size > maxCount then - fail s!"Too many items: {items.size} > {maxCount}" - return items - -def opt (x : Option α) : Parser α := - if let some res := x then - return res - else - fail "expected 
value but got none" - -@[inline] -def token (limit : Nat) : Parser ByteSlice := - takeWhileUpTo1 isTokenCharacter limit - -@[inline] -def crlf : Parser Unit := do - discard <| optional (skipByte '\r'.toUInt8) - skipByte '\n'.toUInt8 - -@[inline] -def rsp (limits : H1.Config) : Parser Unit := do - discard <| takeWhileUpTo1 (· == ' '.toUInt8) limits.maxSpaceSequence - - if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then - fail "invalid space sequence" - else - pure () - -@[inline] -def osp (limits : H1.Config) : Parser Unit := do - discard <| takeWhileUpTo (· == ' '.toUInt8) limits.maxSpaceSequence - - if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then - fail "invalid space sequence" - else - pure () - -@[inline] -def uint8 : Parser UInt8 := do - let d ← digit - return d.toUInt8 - -def hexDigit : Parser UInt8 := do - let b ← any - if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8 - else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10 - else if b ≥ 'a'.toUInt8 && b ≤ 'f'.toUInt8 then return b - 'a'.toUInt8 + 10 - else fail s!"Invalid hex digit {Char.ofUInt8 b |>.quote}" - -@[inline] -def hex : Parser Nat := do - let hexDigits ← many1 (attempt hexDigit) - return (hexDigits.foldl (fun acc cur => acc * 16 + cur.toNat) 0) - --- Actual parsers - --- HTTP-version = HTTP-name "/" DIGIT "." DIGIT --- HTTP-name = %s"HTTP" -def parseHttpVersion : Parser Version := do - skipBytes "HTTP/".toUTF8 - let major ← uint8 - skipByte '.'.toUInt8 - let minor ← uint8 - opt <| Version.ofNumber? 
(major - 48 |>.toNat) (minor - 48 |>.toNat) - --- method = token -def parseMethod : Parser Method := - (skipBytes "GET".toUTF8 <&> fun _ => Method.get) - <|> (skipBytes "HEAD".toUTF8 <&> fun _ => Method.head) - <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post) - <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put) - <|> (skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete) - <|> (skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect) - <|> (skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options) - <|> (skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace) - <|> (skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch) - -def parseURI (limits : H1.Config) : Parser ByteArray := do - let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength - return uri.toByteArray - -/-- -Parses a request line - -request-line = method SP request-target SP HTTP-version --/ -public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do - let method ← parseMethod <* rsp limits - let uri ← parseURI limits <* rsp limits - - let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run uri with - | .ok res => pure res - | .error res => fail res - - let version ← parseHttpVersion <* crlf - return ⟨method, version, uri, .empty⟩ - --- field-line = field-name ":" OWS field-value OWS -def parseFieldLine (limits : H1.Config) : Parser (String × String) := do - let name ← token limits.maxHeaderNameLength - let value ← skipByte ':'.toUInt8 *> osp limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* osp limits - - let name ← opt <| String.fromUTF8? name.toByteArray - let value ← opt <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty - - return (name, value) - -/-- -Parses a single header. - -field-line CRLF / CRLF --/ -public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do - let next ← peek? 
- if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then - crlf - pure none - else - some <$> (parseFieldLine limits <* crlf) - --- quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) -def parseQuotedPair : Parser UInt8 := do - skipByte '\\'.toUInt8 - let b ← any - - if b == '\t'.toUInt8 ∨ b == ' '.toUInt8 ∨ isVChar b ∨ isObsChar b then - return b - else - fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}" - --- quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE -partial def parseQuotedString : Parser String := do - skipByte '"'.toUInt8 - - let rec loop (buf : ByteArray) : Parser ByteArray := do - let b ← any - - if b == '"'.toUInt8 then - return buf - else if b == '\\'.toUInt8 then - let next ← any - if next == '\t'.toUInt8 ∨ next == ' '.toUInt8 ∨ isVChar next ∨ isObsChar next - then loop (buf.push next) - else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}" - else if isQdText b then - loop (buf.push b) - else - fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}" - - opt <| String.fromUTF8? (← loop .empty) - --- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] ) -def parseChunkExt (limits : H1.Config) : Parser (ExtensionName × Option String) := do - osp limits *> skipByte ';'.toUInt8 *> osp limits - let name ← (opt =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtNameLength) <* osp limits - - let some name := ExtensionName.ofString? name - | fail "invalid extension name" - - if (← peekWhen? (· == '='.toUInt8)) |>.isSome then - osp limits *> skipByte '='.toUInt8 *> osp limits - let value ← osp limits *> (parseQuotedString <|> opt =<< (String.fromUTF8? 
<$> ByteSlice.toByteArray <$> token limits.maxChunkExtValueLength)) - - return (name, some value) - - return (name, none) - -/-- -This function parses the size and extension of a chunk --/ -public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (ExtensionName × Option String)) := do - let size ← hex - let ext ← many (parseChunkExt limits) - crlf - return (size, ext) - -/-- -Result of parsing partial or complete information. --/ -public inductive TakeResult - | complete (data : ByteSlice) - | incomplete (data : ByteSlice) (remaining : Nat) - -/-- -This function parses a single chunk in chunked transfer encoding --/ -public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (ExtensionName × Option String) × ByteSlice)) := do - let (size, ext) ← parseChunkSize limits - if size == 0 then - return none - else - let data ← take size - return some ⟨size, ext, data⟩ - -/-- -Parses a fixed size data that can be incomplete. --/ -public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it => - if it.remainingBytes = 0 then - .error it .eof - else if it.remainingBytes < size then - .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes)) - else - .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)])) - -/-- -Parses a fixed size data that can be incomplete. 
--/ -public def parseChunkedSizedData (size : Nat) : Parser TakeResult := do - match ← parseFixedSizeData size with - | .complete data => crlf *> return .complete data - | .incomplete data res => return .incomplete data res - -/-- -This function parses a trailer header (used after chunked body) --/ -def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := parseSingleHeader limits - -/-- -This function parses trailer headers after chunked body --/ -public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do - let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders - crlf - return trailers - -/-- -Parses HTTP status code (3 digits) --/ -def parseStatusCode : Parser Status := do - let d1 ← digit - let d2 ← digit - let d3 ← digit - let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48) - - return Status.ofCode code.toUInt16 - -/-- -Parses reason phrase (text after status code) --/ -def parseReasonPhrase (limits : H1.Config) : Parser String := do - let bytes ← takeWhileUpTo (fun c => c != '\r'.toUInt8) limits.maxReasonPhraseLength - opt <| String.fromUTF8? bytes.toByteArray - -/-- -Parses a status line - -status-line = HTTP-version SP status-code SP [ reason-phrase ] --/ -public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do - let version ← parseHttpVersion <* rsp limits - let status ← parseStatusCode <* rsp limits - discard <| parseReasonPhrase limits <* crlf - return ⟨status, version, .empty⟩ - -/-- -This function parses the body of the last chunk. 
--/ -public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do - discard <| manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders - crlf - -end Std.Http.Protocol.H1 diff --git a/src/Std/Internal/Http/Protocol/H1/Reader.lean b/src/Std/Internal/Http/Protocol/H1/Reader.lean deleted file mode 100644 index 200c044b2208..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Reader.lean +++ /dev/null @@ -1,275 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Time -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal -public import Std.Internal.Http.Protocol.H1.Parser -public import Std.Internal.Http.Protocol.H1.Config -public import Std.Internal.Http.Protocol.H1.Message -public import Std.Internal.Http.Protocol.H1.Error - -public section - -/-! -# HTTP/1.1 Reader - -This module defines the reader state machine for parsing incoming HTTP/1.1 messages. -It tracks the parsing state including start line, headers, and body handling for -both fixed-length and chunked transfer encodings. --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -/-- -The state of the `Reader` state machine. --/ -inductive Reader.State (dir : Direction) : Type - /-- - Initial state waiting for HTTP start line. - -/ - | needStartLine : State dir - - /-- - State waiting for HTTP headers, tracking number of headers parsed. - -/ - | needHeader : Nat → State dir - - /-- - State waiting for chunk size in chunked transfer encoding. - -/ - | needChunkedSize : State dir - - /-- - State waiting for chunk body data of specified size. - -/ - | needChunkedBody : Array (ExtensionName × Option String) → Nat → State dir - - /-- - State waiting for fixed-length body data of specified size. 
- -/ - | needFixedBody : Nat → State dir - - /-- - Paused waiting for a `canContinue` decision, carrying the next state. - -/ - | continue : State dir → State dir - - /-- - State that it completed a single request or response and can go to the next one - -/ - | complete - - /-- - State that it has completed and cannot process more data. - -/ - | closed - - /-- - The input is malformed. - -/ - | failed (error : Error) : State dir -deriving Inhabited, Repr, BEq - -/-- -Manages the reading state of the HTTP parsing and processing machine. --/ -structure Reader (dir : Direction) where - /-- - The current state of the machine. - -/ - state : Reader.State dir := .needStartLine - - /-- - The input byte array. - -/ - input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter - - /-- - The incoming message head. - -/ - messageHead : Message.Head dir := {} - - /-- - Count of messages that this connection already parsed - -/ - messageCount : Nat := 0 - - /-- - Flag that says that it cannot receive more input (the socket disconnected). - -/ - noMoreInput : Bool := false - -namespace Reader - -/-- -Checks if the reader is in a closed state and cannot process more messages. --/ -@[inline] -def isClosed (reader : Reader dir) : Bool := - match reader.state with - | .closed => true - | _ => false - -/-- -Checks if the reader has completed parsing the current message. --/ -@[inline] -def isComplete (reader : Reader dir) : Bool := - match reader.state with - | .complete => true - | _ => false - -/-- -Checks if the reader has encountered an error. --/ -@[inline] -def hasFailed (reader : Reader dir) : Bool := - match reader.state with - | .failed _ => true - | _ => false - -/-- -Feeds new data into the reader's input buffer. -If the current input is exhausted, replaces it; otherwise compacts the buffer -by discarding already-parsed bytes before appending. 
--/ -@[inline] -def feed (data : ByteArray) (reader : Reader dir) : Reader dir := - { reader with input := - if reader.input.atEnd - then data.iter - else (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter } - -/-- -Replaces the reader's input iterator with a new one. --/ -@[inline] -def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir := - { reader with input } - -/-- -Updates the message head being constructed. --/ -@[inline] -def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir := - { reader with messageHead } - -/-- -Adds a header to the current message head. --/ -@[inline] -def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir := - match dir with - | .sending => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } } - | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } } - -/-- -Closes the reader, transitioning to the closed state. --/ -@[inline] -def close (reader : Reader dir) : Reader dir := - { reader with state := .closed, noMoreInput := true } - -/-- -Marks the current message as complete and prepares for the next message. --/ -@[inline] -def markComplete (reader : Reader dir) : Reader dir := - { reader with - state := .complete - messageCount := reader.messageCount + 1 } - -/-- -Transitions the reader to a failed state with the given error. --/ -@[inline] -def fail (error : Error) (reader : Reader dir) : Reader dir := - { reader with state := .failed error } - -/-- -Resets the reader to parse a new message on the same connection. --/ -@[inline] -def reset (reader : Reader dir) : Reader dir := - { reader with - state := .needStartLine - messageHead := {} } - -/-- -Checks if more input is needed to continue parsing. 
--/ -@[inline] -def needsMoreInput (reader : Reader dir) : Bool := - reader.input.atEnd && !reader.noMoreInput && - match reader.state with - | .complete | .closed | .failed _ | .«continue» _ => false - | _ => true - -/-- -Returns the current parse error if the reader has failed. --/ -@[inline] -def getError (reader : Reader dir) : Option Error := - match reader.state with - | .failed err => some err - | _ => none - -/-- -Gets the number of bytes remaining in the input buffer. --/ -@[inline] -def remainingBytes (reader : Reader dir) : Nat := - reader.input.array.size - reader.input.pos - -/-- -Advances the input iterator by n bytes. --/ -@[inline] -def advance (n : Nat) (reader : Reader dir) : Reader dir := - { reader with input := reader.input.forward n } - -/-- -Transitions to the state for reading headers. --/ -@[inline] -def startHeaders (reader : Reader dir) : Reader dir := - { reader with state := .needHeader 0 } - -/-- -Transitions to the state for reading a fixed-length body. --/ -@[inline] -def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir := - { reader with state := .needFixedBody size } - -/-- -Transitions to the state for reading chunked transfer encoding. --/ -@[inline] -def startChunkedBody (reader : Reader dir) : Reader dir := - { reader with state := .needChunkedSize } - -/-- -Marks that no more input will be provided (connection closed). --/ -@[inline] -def markNoMoreInput (reader : Reader dir) : Reader dir := - { reader with noMoreInput := true } - -/-- -Checks if the connection should be kept alive for the next message. --/ -def shouldKeepAlive (reader : Reader dir) : Bool := - match reader.messageHead.headers.get? 
.connection with - | some val => let s := val.value.toLower; s == "keep-alive" - | none => true - -end Reader diff --git a/src/Std/Internal/Http/Protocol/H1/Writer.lean b/src/Std/Internal/Http/Protocol/H1/Writer.lean deleted file mode 100644 index c4a43438996b..000000000000 --- a/src/Std/Internal/Http/Protocol/H1/Writer.lean +++ /dev/null @@ -1,265 +0,0 @@ -/- -Copyright (c) 2025 Lean FRO, LLC. All rights reserved. -Released under Apache 2.0 license as described in the file LICENSE. -Authors: Sofia Rodrigues --/ -module - -prelude -public import Std.Time -public import Std.Internal.Http.Data -public import Std.Internal.Http.Internal -public import Std.Internal.Http.Protocol.H1.Parser -public import Std.Internal.Http.Protocol.H1.Config -public import Std.Internal.Http.Protocol.H1.Message -public import Std.Internal.Http.Protocol.H1.Error - -public section - -/-! -# HTTP/1.1 Writer - -This module defines the writer state machine for generating outgoing HTTP/1.1 messages. -It handles encoding headers and body content for both fixed-length and chunked -transfer encodings. --/ - -namespace Std.Http.Protocol.H1 - -set_option linter.all true - -open Internal - -/-- -The state of the `Writer` state machine. --/ -inductive Writer.State - /-- - It starts writing only when part of the request is received. - -/ - | pending - - /-- - Ready to write the message - -/ - | waitingHeaders - - /-- - This is the state that we wait for a forced flush. This happens and causes the writer to - start actually writing to the outputData - -/ - | waitingForFlush - - /-- - Writing the headers. - -/ - | writingHeaders - - /-- - Writing a fixed size body output. - -/ - | writingBody (mode : Body.Length) - - /-- - It will flush all the remaining data and cause it to shutdown the machine. - -/ - | shuttingDown - - /-- - State that it completed a single request and can go to the next one - -/ - | complete - - /-- - State that it has completed and cannot process more data. 
- -/ - | closed -deriving Inhabited, Repr, BEq - -/-- -Manages the writing state of the HTTP generating and writing machine. --/ -structure Writer (dir : Direction) where - /-- - This is all the data that the user is sending that is being accumulated. - -/ - userData : Array Chunk := .empty - - /-- - All the data that is produced by the writer. - -/ - outputData : ChunkedBuffer := .empty - - /-- - The state of the writer machine. - -/ - state : Writer.State := .pending - - /-- - When the user specifies the exact size upfront, we can use Content-Length - instead of chunked transfer encoding for streaming - -/ - knownSize : Option Body.Length := none - - /-- - The outgoing message that will be written to the output - -/ - messageHead : Message.Head dir.swap := {} - - /-- - The user sent the message - -/ - sentMessage : Bool := false - - /-- - This flags that the body stream is closed so if we start to write the body we know exactly the size. - -/ - userClosedBody : Bool := false - -namespace Writer - -/-- -Checks if the writer is ready to send data to the output. 
--/ -@[inline] -def isReadyToSend {dir} (writer : Writer dir) : Bool := - match writer.state with - | .closed | .complete => true - | _ => writer.userClosedBody - -/-- -Checks if the writer is closed (cannot process more data) --/ -@[inline] -def isClosed (writer : Writer dir) : Bool := - match writer.state with - | .closed => true - | _ => false - -/-- -Checks if the writer has completed processing a request --/ -@[inline] -def isComplete (writer : Writer dir) : Bool := - match writer.state with - | .complete => true - | _ => false - -/-- -Checks if the writer can accept more data from the user --/ -@[inline] -def canAcceptData (writer : Writer dir) : Bool := - match writer.state with - | .waitingHeaders => true - | .waitingForFlush => true - | .writingBody _ => !writer.userClosedBody - | _ => false - -/-- -Marks the body as closed, indicating no more user data will be added --/ -@[inline] -def closeBody (writer : Writer dir) : Writer dir := - { writer with userClosedBody := true } - -/-- -Determines the transfer encoding mode based on explicit setting, body closure state, or defaults to chunked --/ -def determineTransferMode (writer : Writer dir) : Body.Length := - if let some mode := writer.knownSize then - mode - else if writer.userClosedBody then - let size := writer.userData.foldl (fun x y => x + y.data.size) 0 - .fixed size - else - .chunked - -/-- -Adds user data chunks to the writer's buffer if the writer can accept data --/ -@[inline] -def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir := - if writer.canAcceptData then - { writer with userData := writer.userData ++ data } - else - writer - -/-- -Writes accumulated user data to output using fixed-size encoding --/ -def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat := - if writer.userData.size = 0 then - (writer, limitSize) - else - let data := writer.userData.map Chunk.data - let (chunks, totalSize) := data.foldl (fun (acc, size) ba => - if size >= limitSize 
then - (acc, size) - else - let remaining := limitSize - size - let takeSize := min ba.size remaining - let chunk := ba.extract 0 takeSize - (acc.push chunk, size + takeSize) - ) (#[], 0) - let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks) - let remaining := limitSize - totalSize - ({ writer with userData := #[], outputData }, remaining) - -/-- -Writes accumulated user data to output using chunked transfer encoding --/ -def writeChunkedBody (writer : Writer dir) : Writer dir := - if writer.userData.size = 0 then - writer - else - let data := writer.userData - { writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData } - -/-- -Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state --/ -def writeFinalChunk (writer : Writer dir) : Writer dir := - let writer := writer.writeChunkedBody - { writer with - outputData := writer.outputData.write "0\r\n\r\n".toUTF8 - state := .complete - } - -/-- -Extracts all accumulated output data and returns it with a cleared output buffer --/ -@[inline] -def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) := - let output := writer.outputData.toByteArray - some ({ writer with outputData := ChunkedBuffer.empty }, output) - -/-- -Updates the writer's state machine to a new state --/ -@[inline] -def setState (state : Writer.State) (writer : Writer dir) : Writer dir := - { writer with state } - -/-- -Writes the message headers to the output buffer --/ -private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir := - { writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead } - -/-- -Checks if the connection should be kept alive based on the Connection header --/ -def shouldKeepAlive (writer : Writer dir) : Bool := - writer.messageHead.headers.get? 
.connection - |>.map (fun v => v.value.toLower != "close") - |>.getD true - -/-- -Closes the writer, transitioning to the closed state. --/ -@[inline] -def close (writer : Writer dir) : Writer dir := - { writer with state := .closed } - -end Writer From 9acca40aafbcc30eb47cfd2c4b241ebfca4ea1b9 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 13 Feb 2026 10:21:57 -0300 Subject: [PATCH 29/44] revert: h1 --- src/Std/Internal/Http.lean | 1 - src/Std/Internal/Parsec/ByteArray.lean | 21 ++++----------------- 2 files changed, 4 insertions(+), 18 deletions(-) diff --git a/src/Std/Internal/Http.lean b/src/Std/Internal/Http.lean index 9c7ee1f6899b..1dc3c0b6ad35 100644 --- a/src/Std/Internal/Http.lean +++ b/src/Std/Internal/Http.lean @@ -7,4 +7,3 @@ module prelude public import Std.Internal.Http.Data -public import Std.Internal.Http.Protocol.H1 diff --git a/src/Std/Internal/Parsec/ByteArray.lean b/src/Std/Internal/Parsec/ByteArray.lean index 787191384fbb..7832bb5ec549 100644 --- a/src/Std/Internal/Parsec/ByteArray.lean +++ b/src/Std/Internal/Parsec/ByteArray.lean @@ -57,29 +57,16 @@ def skipByte (b : UInt8) : Parser Unit := /-- Skip a sequence of bytes equal to the given `ByteArray`. -/ -def skipBytes (arr : ByteArray) : Parser Unit := fun it => - let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator := - if h : idx < arr.size then - if hnext : it.hasNext then - let got := it.curr' hnext - let want := arr[idx] - if got = want then - go (idx + 1) (it.next' hnext) - else - .error it (.other s!"expected byte {want}, got {got}") - else - .error it (.other s!"unexpected end of input while matching {arr.size} bytes") - else - .success it () - go 0 it +def skipBytes (arr : ByteArray) : Parser Unit := do + for b in arr do + skipByte b /-- Parse a string by matching its UTF-8 bytes, returns the string on success. 
-/ @[inline] def pstring (s : String) : Parser String := do - let utf8 := s.toUTF8 - skipBytes utf8 + skipBytes s.toUTF8 return s /-- From 8722e508971f029e03c2811b761f07413a9b7ad6 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Mon, 16 Feb 2026 22:33:35 -0300 Subject: [PATCH 30/44] feat: pull-based body --- src/Std/Internal/Http/Data/Body/Stream.lean | 856 ++++++++++++-------- tests/lean/run/async_http_body.lean | 358 ++++---- 2 files changed, 725 insertions(+), 489 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 30245ef2f9e3..527808d4af78 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -13,17 +13,20 @@ public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Chunk public import Std.Internal.Http.Data.Body.Basic public import Std.Internal.Http.Data.Body.Length -public import Init.Data.Queue public import Init.Data.ByteArray public section /-! -# Body.Stream +# Body Channels -A `Stream` represents an asynchronous channel for streaming data in chunks. It provides an -interface for producers and consumers to exchange chunks with optional metadata (extensions), -making it suitable for HTTP chunked transfer encoding and other streaming scenarios. +This module defines a zero-buffer rendezvous body channel split into two faces: + +- `Body.Outgoing`: producer side (send chunks) +- `Body.Incoming`: consumer side (receive chunks) + +There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender. +At most one blocked producer and one blocked consumer are supported. 
-/ namespace Std.Http.Body @@ -31,10 +34,9 @@ open Std Internal IO Async set_option linter.all true -namespace Stream +namespace Channel open Internal.IO.Async in - private inductive Consumer where | normal (promise : IO.Promise (Option Chunk)) | select (finished : Waiter (Option Chunk)) @@ -55,503 +57,669 @@ private structure Producer where chunk : Chunk promise : IO.Promise Bool -private structure State where - /-- - Chunks pushed into the stream that are waiting to be consumed. - -/ - values : Std.Queue Chunk - - /-- - Current number of chunks buffered in the stream. - -/ - amount : Nat +open Internal.IO.Async in +private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do + let lose := return false + let win promise := do + promise.resolve (.ok x) + return true + waiter.race lose win +private structure State where /-- - Maximum number of chunks allowed in the buffer. Writers block when amount ≥ capacity. + A single blocked producer waiting for a receiver -/ - capacity : Nat + pendingProducer : Option Producer /-- - Consumers that are blocked on a producer providing them a chunk. They will be resolved to `none` - if the stream closes. + A single blocked consumer waiting for a producer -/ - consumers : Std.Queue Consumer + pendingConsumer : Option Consumer /-- - Producers that are blocked waiting for buffer space to become available. + A waiter for `Outgoing.interestSelector` -/ - producers : Std.Queue Producer + interestWaiter : Option (Internal.IO.Async.Waiter Bool) /-- - Whether the stream is closed already. + Whether the channel is closed -/ closed : Bool + /-- - Known size of the stream if available. + Known size of the stream if available -/ knownSize : Option Body.Length deriving Nonempty -end Stream +end Channel -/-- -A channel for chunks with support for chunk extensions. --/ -structure Stream where +/-- Receive-side face of a body channel. 
-/ +structure Incoming where private mk :: - private state : Mutex Stream.State + private state : Mutex Channel.State deriving Nonempty, TypeName -namespace Stream +/-- Send-side face of a body channel. -/ +structure Outgoing where + private mk :: + private state : Mutex Channel.State +deriving Nonempty, TypeName -/-- -Creates a new Stream with a specified capacity. --/ -def emptyWithCapacity (capacity : Nat := 128) : Async Stream := do - return { - state := ← Mutex.new { - values := ∅ - consumers := ∅ - producers := ∅ - amount := 0 - capacity - closed := false - knownSize := none - } +/-- Creates a rendezvous body channel. -/ +def mkChannel : Async (Outgoing × Incoming) := do + let state ← Mutex.new { + pendingProducer := none + pendingConsumer := none + interestWaiter := none + closed := false + knownSize := none } + return ({ state }, { state }) -/-- -Creates a new Stream with default capacity. --/ -@[always_inline, inline] -def empty : Async Stream := - emptyWithCapacity +namespace Channel private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length := match knownSize with | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size)) | _ => knownSize -private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : +private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Unit := do let st ← get - -- Try to wake a producer if we have space - if st.amount < st.capacity then - if let some (producer, producers) := st.producers.dequeue? 
then - let chunk := producer.chunk - if st.amount + 1 <= st.capacity then - set { st with - values := st.values.enqueue chunk, - amount := st.amount + 1, - producers - } - producer.promise.resolve true + + let pendingConsumer ← + match st.pendingConsumer with + | some (.select waiter) => + if ← waiter.checkFinished then + pure none + else + pure st.pendingConsumer + | _ => + pure st.pendingConsumer + + let interestWaiter ← + match st.interestWaiter with + | some waiter => + if ← waiter.checkFinished then + pure none else - set { st with producers := producers.enqueue producer } + pure st.interestWaiter + | none => + pure none + + set { st with pendingConsumer, interestWaiter } + +private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : + AtomicT State m Unit := do + let st ← get + if let some waiter := st.interestWaiter then + discard <| resolveInterestWaiter waiter true + set { st with interestWaiter := none } + +private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : + AtomicT State m Bool := do + let st ← get + return st.pendingProducer.isSome || st.closed + +private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : + AtomicT State m Bool := do + let st ← get + return st.pendingConsumer.isSome private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : AtomicT State m (Option Chunk) := do let st ← get - if let some (chunk, values) := st.values.dequeue? then - let newKnownSize := decreaseKnownSize st.knownSize chunk - let newAmount := st.amount - 1 - set { st with values, knownSize := newKnownSize, amount := newAmount } - tryWakeProducer - return some chunk + if let some producer := st.pendingProducer then + producer.promise.resolve true + set { + st with + pendingProducer := none + knownSize := decreaseKnownSize st.knownSize producer.chunk + } + return some producer.chunk else return none -/-- -Attempts to receive a chunk from the stream. 
Returns `some` with a chunk when data is available, or `none` -when the stream is closed or no data is available. --/ -def tryRecv (stream : Stream) : Async (Option Chunk) := - stream.state.atomically do - tryRecv' - -private def recv' (stream : Stream) : BaseIO (Task (Option Chunk)) := do - stream.state.atomically do - if let some chunk ← tryRecv' then - return .pure <| some chunk - else if (← get).closed then - return .pure none - else - let promise ← IO.Promise.new - modify fun st => { st with consumers := st.consumers.enqueue (.normal promise) } - return promise.result?.map (sync := true) (·.bind id) +private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : + AtomicT State m Unit := do + let st ← get + if st.closed then + return () + + if let some consumer := st.pendingConsumer then + discard <| consumer.resolve none + + if let some producer := st.pendingProducer then + producer.promise.resolve false + + if let some waiter := st.interestWaiter then + discard <| resolveInterestWaiter waiter false + + set { + st with + pendingProducer := none + pendingConsumer := none + interestWaiter := none + closed := true + } + +end Channel + +namespace Incoming /-- -Receives a chunk from the stream. Blocks if no data is available yet. Returns `none` if the stream -is closed and no data is available. The amount parameter is ignored for chunk streams. +Attempts to receive a chunk from the channel without blocking. +Returns `some chunk` only when a producer is already waiting. 
-/ -def recv (stream : Stream) (_count : Option UInt64) : Async (Option Chunk) := do - Async.ofTask (← recv' stream) +def tryRecv (incoming : Incoming) : Async (Option Chunk) := + incoming.state.atomically do + Channel.pruneFinishedWaiters + Channel.tryRecv' + +private def recv' (incoming : Incoming) : BaseIO (AsyncTask (Option Chunk)) := do + incoming.state.atomically do + Channel.pruneFinishedWaiters + + if let some chunk ← Channel.tryRecv' then + return AsyncTask.pure (some chunk) -private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do - while true do let st ← get - if let some (consumer, consumers) := st.consumers.dequeue? then - let newKnownSize := decreaseKnownSize st.knownSize chunk - let success ← consumer.resolve (some chunk) - set { st with consumers, knownSize := newKnownSize } - if success then - break - else - if st.amount + 1 <= st.capacity then - set { st with - values := st.values.enqueue chunk, - amount := st.amount + 1 - } - return true - else - return false - return true + if st.closed then + return AsyncTask.pure none -private def trySend (stream : Stream) (chunk : Chunk) : BaseIO Bool := do - stream.state.atomically do - if (← get).closed then - return false - else - trySend' chunk - -private def send' (stream : Stream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do - stream.state.atomically do - if (← get).closed then - return .pure <| .error (.userError "channel closed") - else if ← trySend' chunk then - return .pure <| .ok () - else - let promise ← IO.Promise.new - let producer : Producer := { chunk, promise } - modify fun st => { st with producers := st.producers.enqueue producer } - return promise.result?.map (sync := true) fun res => - if res.getD false then .ok () else .error (.userError "channel closed") + if st.pendingConsumer.isSome then + return Task.pure (.error (IO.Error.userError "only one blocked consumer is allowed")) + + let promise ← IO.Promise.new + set { st with pendingConsumer := some (.normal 
promise) } + Channel.signalInterest + return promise.result?.map (sync := true) fun + | none => .error (IO.Error.userError "the promise linked to the consumer was dropped") + | some res => .ok res /-- -Sends a chunk to the stream. Blocks if the buffer is full. +Receives a chunk from the channel. Blocks until a producer sends one. +Returns `none` if the channel is closed and no producer is waiting. -/ -def send (stream : Stream) (chunk : Chunk) : Async Unit := do - if chunk.data.isEmpty then - return - - let res : AsyncTask _ ← send' stream chunk - await res +def recv (incoming : Incoming) (_count : Option UInt64) : Async (Option Chunk) := + do Async.ofAsyncTask (← recv' incoming) /-- -Gets the known size of the stream if available. Returns `none` if the size is not known. +Closes the channel. -/ -@[always_inline, inline] -def getKnownSize (stream : Stream) : Async (Option Body.Length) := do - stream.state.atomically do - return (← get).knownSize +def close (incoming : Incoming) : Async Unit := + incoming.state.atomically do + Channel.close' /-- -Sets the known size of the stream. Use this when the total expected size is known ahead of time. +Checks whether the channel is closed. -/ @[always_inline, inline] -def setKnownSize (stream : Stream) (size : Option Body.Length) : Async Unit := do - stream.state.atomically do - modify fun st => { st with knownSize := size } +def isClosed (incoming : Incoming) : Async Bool := + incoming.state.atomically do + return (← get).closed /-- -Closes the stream, preventing further sends and causing pending/future -recv operations to return `none` when no data is available. +Gets the known size if available. 
-/ -def close (stream : Stream) : Async Unit := do - stream.state.atomically do - let st ← get - if st.closed then return () - for consumer in st.consumers.toArray do - discard <| consumer.resolve none - for producer in st.producers.toArray do - producer.promise.resolve false - set { st with consumers := ∅, producers := ∅, closed := true } +@[always_inline, inline] +def getKnownSize (incoming : Incoming) : Async (Option Body.Length) := + incoming.state.atomically do + return (← get).knownSize /-- -Checks if the stream is closed. +Sets known size metadata. -/ @[always_inline, inline] -def isClosed (stream : Stream) : Async Bool := do - stream.state.atomically do - return (← get).closed - -@[inline] -private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : - AtomicT State m Bool := do - let st ← get - return !st.values.isEmpty || st.closed +def setKnownSize (incoming : Incoming) (size : Option Body.Length) : Async Unit := + incoming.state.atomically do + modify fun st => { st with knownSize := size } open Internal.IO.Async in - /-- -Creates a `Selector` that resolves once the `Stream` has data available and provides that data. +Creates a selector that resolves when a producer is waiting (or the channel closes). 
-/ -def recvSelector (stream : Stream) : Selector (Option Chunk) where +def recvSelector (incoming : Incoming) : Selector (Option Chunk) where tryFn := do - stream.state.atomically do - if ← recvReady' then - let val ← tryRecv' - return some val + incoming.state.atomically do + Channel.pruneFinishedWaiters + if ← Channel.recvReady' then + return some (← Channel.tryRecv') else return none registerFn waiter := do - stream.state.atomically do - if ← recvReady' then + incoming.state.atomically do + Channel.pruneFinishedWaiters + if ← Channel.recvReady' then let lose := return () let win promise := do - promise.resolve (.ok (← tryRecv')) - + promise.resolve (.ok (← Channel.tryRecv')) waiter.race lose win else - modify fun st => { st with consumers := st.consumers.enqueue (.select waiter) } + let st ← get + if st.pendingConsumer.isSome then + throw (.userError "only one blocked consumer is allowed") - unregisterFn := do - stream.state.atomically do - let st ← get - let consumers ← st.consumers.filterM - fun - | .normal .. => return true - | .select waiter => return !(← waiter.checkFinished) - set { st with consumers } + set { st with pendingConsumer := some (.select waiter) } + Channel.signalInterest -/-- -Sends data to the stream and writes a chunk to it. --/ -def writeChunk (stream : Stream) (chunk : Chunk) : Async Unit := - stream.send chunk + unregisterFn := do + incoming.state.atomically do + Channel.pruneFinishedWaiters /-- -Iterate over the stream content in chunks, processing each chunk with the given step function. +Iterates over chunks until the channel closes. 
-/ @[inline] protected partial def forIn - {β : Type} (stream : Stream) (acc : β) + {β : Type} (incoming : Incoming) (acc : β) (step : Chunk → β → Async (ForInStep β)) : Async β := do - let rec @[specialize] loop (stream : Stream) (acc : β) : Async β := do - if let some chunk ← stream.recv none then + let rec @[specialize] loop (incoming : Incoming) (acc : β) : Async β := do + if let some chunk ← incoming.recv none then match ← step chunk acc with | .done res => return res - | .yield res => loop stream res + | .yield res => loop incoming res else return acc - loop stream acc + loop incoming acc /-- -Iterate over the stream content in chunks, processing each chunk with the given step function. +Context-aware iteration over chunks until the channel closes. -/ @[inline] protected partial def forIn' - {β : Type} (stream : Stream) (acc : β) + {β : Type} (incoming : Incoming) (acc : β) (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do - let rec @[specialize] loop (stream : Stream) (acc : β) : ContextAsync β := do + let rec @[specialize] loop (incoming : Incoming) (acc : β) : ContextAsync β := do let data ← Selectable.one #[ - .case (stream.recvSelector) pure, + .case incoming.recvSelector pure, .case (← ContextAsync.doneSelector) (fun _ => pure none), ] if let some chunk := data then match ← step chunk acc with | .done res => return res - | .yield res => loop stream res + | .yield res => loop incoming res else return acc - loop stream acc - -instance : ForIn Async Stream Chunk where - forIn := Std.Http.Body.Stream.forIn - -instance : ForIn ContextAsync Stream Chunk where - forIn := Std.Http.Body.Stream.forIn' + loop incoming acc /-- -Reads all remaining chunks from the stream and returns the concatenated data as a `ByteArray`. -Blocks until the stream is closed. If `maximumSize` is provided, throws an `IO.Error` if the -total data exceeds that limit. +Reads all remaining chunks and decodes them into `α`. 
-/ -partial def readAll [FromByteArray α] (stream : Stream) (maximumSize : Option UInt64 := none) : ContextAsync α := do - let mut result := ByteArray.empty +partial def readAll + [FromByteArray α] + (incoming : Incoming) + (maximumSize : Option UInt64 := none) : + ContextAsync α := do + let rec loop (result : ByteArray) : ContextAsync ByteArray := do + let data ← Selectable.one #[ + .case incoming.recvSelector pure, + .case (← ContextAsync.doneSelector) (fun _ => pure none), + ] - for chunk in stream do - result := result ++ chunk.data - if let some max := maximumSize then - if result.size.toUInt64 > max then - throw (.userError s!"body exceeded maximum size of {max} bytes") + match data with + | none => return result + | some chunk => + let result := result ++ chunk.data + if let some max := maximumSize then + if result.size.toUInt64 > max then + throw (.userError s!"body exceeded maximum size of {max} bytes") + loop result + + let result ← loop ByteArray.empty match FromByteArray.fromByteArray result with | .ok a => return a | .error msg => throw (.userError msg) -end Std.Http.Body.Stream +end Incoming + +namespace Outgoing + +private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit) := do + outgoing.state.atomically do + Channel.pruneFinishedWaiters + while true do + let st ← get + + if st.closed then + return Task.pure (.error (IO.Error.userError "channel closed")) + + if let some consumer := st.pendingConsumer then + let success ← consumer.resolve (some chunk) + if success then + set { + st with + pendingConsumer := none + knownSize := Channel.decreaseKnownSize st.knownSize chunk + } + return AsyncTask.pure () + else + set { st with pendingConsumer := none } + else + if st.pendingProducer.isSome then + return Task.pure (.error (IO.Error.userError "only one blocked producer is allowed")) + + let promise ← IO.Promise.new + set { st with pendingProducer := some { chunk, promise } } + return promise.result?.map (sync := true) fun + | none 
=> .error (IO.Error.userError "the promise linked to the producer was dropped") + | some true => .ok () + | some false => .error (IO.Error.userError "channel closed") + return Task.pure (.error (IO.Error.userError "unreachable")) + +/-- +Sends a chunk. Blocks until a receiver is waiting. +-/ +def send (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do + if chunk.data.isEmpty then + return + + let res ← send' outgoing chunk + await res + +/-- Alias for `send`. -/ +def writeChunk (outgoing : Outgoing) (chunk : Chunk) : Async Unit := + outgoing.send chunk + +/-- Closes the channel. -/ +def close (outgoing : Outgoing) : Async Unit := + outgoing.state.atomically do + Channel.close' + +/-- Checks whether the channel is closed. -/ +@[always_inline, inline] +def isClosed (outgoing : Outgoing) : Async Bool := + outgoing.state.atomically do + return (← get).closed + +/-- Returns true when a consumer is currently blocked waiting for data. -/ +def hasInterest (outgoing : Outgoing) : Async Bool := + outgoing.state.atomically do + Channel.pruneFinishedWaiters + Channel.hasInterest' + +/-- Gets the known size if available. -/ +@[always_inline, inline] +def getKnownSize (outgoing : Outgoing) : Async (Option Body.Length) := + outgoing.state.atomically do + return (← get).knownSize + +/-- Sets known size metadata. -/ +@[always_inline, inline] +def setKnownSize (outgoing : Outgoing) (size : Option Body.Length) : Async Unit := + outgoing.state.atomically do + modify fun st => { st with knownSize := size } + +open Internal.IO.Async in +/-- +Creates a selector that resolves when consumer interest is present. +Returns `true` when a consumer is waiting, `false` when the channel closes first. 
+-/ +def interestSelector (outgoing : Outgoing) : Selector Bool where + tryFn := do + outgoing.state.atomically do + Channel.pruneFinishedWaiters + let st ← get + if st.pendingConsumer.isSome then + return some true + else if st.closed then + return some false + else + return none + + registerFn waiter := do + outgoing.state.atomically do + Channel.pruneFinishedWaiters + let st ← get + + if st.pendingConsumer.isSome then + let lose := return () + let win promise := do + promise.resolve (.ok true) + waiter.race lose win + else if st.closed then + let lose := return () + let win promise := do + promise.resolve (.ok false) + waiter.race lose win + else if st.interestWaiter.isSome then + throw (.userError "only one blocked interest selector is allowed") + else + set { st with interestWaiter := some waiter } + + unregisterFn := do + outgoing.state.atomically do + Channel.pruneFinishedWaiters + +end Outgoing + +/-- +Creates an incoming body from a producer function. +This is one of the high-level body constructors intended for builders. +-/ +def stream (gen : Outgoing → Async Unit) : Async Incoming := do + let (outgoing, incoming) ← mkChannel + background (gen outgoing) + return incoming + +/-- +Creates an incoming body from a fixed byte array. +This is one of the high-level body constructors intended for builders. +-/ +def fromBytes (content : ByteArray) : Async Incoming := do + let (outgoing, incoming) ← mkChannel + outgoing.setKnownSize (some (.fixed content.size)) + background do + outgoing.send (Chunk.ofByteArray content) + outgoing.close + return incoming + +/-- +Creates an empty incoming body. +This is one of the high-level body constructors intended for builders. 
+-/ +def empty : Async Incoming := do + let (outgoing, incoming) ← mkChannel + outgoing.setKnownSize (some (.fixed 0)) + outgoing.close + return incoming + +instance : ForIn Async Incoming Chunk where + forIn := Incoming.forIn + +instance : ForIn ContextAsync Incoming Chunk where + forIn := Incoming.forIn' + +end Std.Http.Body namespace Std.Http.Request.Builder open Internal.IO.Async +private def withContentLength + (builder : Request.Builder) + (size : Nat) : + Request.Builder := + Request.Builder.header builder Header.Name.contentLength (Header.Value.ofString! (toString size)) + /-- -Builds a request with a streaming body. The generator function receives the `Stream` and -can write chunks to it asynchronously. +Builds a request with a streaming body generator. -/ -def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Request Body.Stream) := do - let body ← Body.Stream.empty - background (gen body) - return { head := builder.head, body } +def stream + (builder : Request.Builder) + (gen : Body.Outgoing → Async Unit) : + Async (Request Body.Incoming) := do + let incoming ← Body.stream gen + return Request.Builder.body builder incoming + +private def emptyBody (builder : Request.Builder) : Async (Request Body.Incoming) := do + let incoming ← Body.empty + let builder := withContentLength builder 0 + return Request.Builder.body builder incoming /-- Builds a request with an empty body. 
-/ -def blank (builder : Builder) : Async (Request Body.Stream) := do - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed 0)) - body.close - return { head := builder.head, body } +def blank (builder : Request.Builder) : Async (Request Body.Incoming) := + emptyBody builder + +private def fromBytesCore + (builder : Request.Builder) + (content : ByteArray) : + Async (Request Body.Incoming) := do + let incoming ← Body.fromBytes content + let builder := withContentLength builder content.size + return Request.Builder.body builder incoming + +/-- +Builds a request from raw bytes. +-/ +def fromBytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Incoming) := + fromBytesCore builder content /-- -Builds a request with a text body. Sets Content-Type to text/plain and Content-Length automatically. --/ -def text (builder : Builder) (content : String) : Async (Request Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a request with a binary body. +-/ +def bytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Incoming) := do + let builder := Request.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "application/octet-stream") + fromBytesCore builder content /-- -Builds a request with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. 
--/ -def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Stream) := do - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed content.size)) - body.send (Chunk.ofByteArray content) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) - return { head := { builder.head with headers }, body } +Builds a request with a text body. +-/ +def text (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do + let builder := Request.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "text/plain; charset=utf-8") + fromBytesCore builder content.toUTF8 /-- -Builds a request with a JSON body. Sets Content-Type to application/json and Content-Length automatically. --/ -def json (builder : Builder) (content : String) : Async (Request Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a request with a JSON body. +-/ +def json (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do + let builder := Request.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "application/json") + fromBytesCore builder content.toUTF8 /-- -Builds a request with an HTML body. Sets Content-Type to text/html and Content-Length automatically. 
--/ -def html (builder : Builder) (content : String) : Async (Request Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a request with an HTML body. +-/ +def html (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do + let builder := Request.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "text/html; charset=utf-8") + fromBytesCore builder content.toUTF8 /-- -Builds a request with an empty body (alias for blank). +Builds a request with no body. -/ -def noBody (builder : Builder) : Async (Request Body.Stream) := - builder.blank +def noBody (builder : Request.Builder) : Async (Request Body.Incoming) := + Request.Builder.blank builder end Std.Http.Request.Builder namespace Std.Http.Response.Builder open Internal.IO.Async +private def withContentLength + (builder : Response.Builder) + (size : Nat) : + Response.Builder := + Response.Builder.header builder Header.Name.contentLength (Header.Value.ofString! (toString size)) + /-- -Builds a response with a streaming body. The generator function receives the `Stream` and -can write chunks to it asynchronously. +Builds a response with a streaming body generator. 
-/ -def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Response Body.Stream) := do - let body ← Body.Stream.empty - background (gen body) - return { head := builder.head, body } +def stream + (builder : Response.Builder) + (gen : Body.Outgoing → Async Unit) : + Async (Response Body.Incoming) := do + let incoming ← Body.stream gen + return Response.Builder.body builder incoming + +private def emptyBody (builder : Response.Builder) : Async (Response Body.Incoming) := do + let incoming ← Body.empty + let builder := withContentLength builder 0 + return Response.Builder.body builder incoming /-- Builds a response with an empty body. -/ -def blank (builder : Builder) : Async (Response Body.Stream) := do - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed 0)) - body.close - return { head := builder.head, body } +def blank (builder : Response.Builder) : Async (Response Body.Incoming) := + emptyBody builder + +private def fromBytesCore + (builder : Response.Builder) + (content : ByteArray) : + Async (Response Body.Incoming) := do + let incoming ← Body.fromBytes content + let builder := withContentLength builder content.size + return Response.Builder.body builder incoming + +/-- +Builds a response from raw bytes. +-/ +def fromBytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Incoming) := + fromBytesCore builder content /-- -Builds a response with a text body. Sets Content-Type to text/plain and Content-Length automatically. --/ -def text (builder : Builder) (content : String) : Async (Response Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! 
(toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a response with a binary body. +-/ +def bytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Incoming) := do + let builder := Response.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "application/octet-stream") + fromBytesCore builder content /-- -Builds a response with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically. --/ -def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Stream) := do - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed content.size)) - body.send (Chunk.ofByteArray content) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/octet-stream") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString content.size)) - return { head := { builder.head with headers }, body } +Builds a response with a text body. +-/ +def text (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do + let builder := Response.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "text/plain; charset=utf-8") + fromBytesCore builder content.toUTF8 /-- -Builds a response with a JSON body. Sets Content-Type to application/json and Content-Length automatically. --/ -def json (builder : Builder) (content : String) : Async (Response Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "application/json") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a response with a JSON body. 
+-/ +def json (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do + let builder := Response.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "application/json") + fromBytesCore builder content.toUTF8 /-- -Builds a response with an HTML body. Sets Content-Type to text/html and Content-Length automatically. --/ -def html (builder : Builder) (content : String) : Async (Response Body.Stream) := do - let bytes := content.toUTF8 - let body ← Body.Stream.empty - body.setKnownSize (some (.fixed bytes.size)) - body.send (Chunk.ofByteArray bytes) - body.close - let headers := builder.head.headers - |>.insert Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") - |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size)) - return { head := { builder.head with headers }, body } +Builds a response with an HTML body. +-/ +def html (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do + let builder := Response.Builder.header + builder + Header.Name.contentType + (Header.Value.ofString! "text/html; charset=utf-8") + fromBytesCore builder content.toUTF8 /-- -Builds a response with an empty body (alias for blank). +Builds a response with no body. -/ -def noBody (builder : Builder) : Async (Response Body.Stream) := - builder.blank +def noBody (builder : Response.Builder) : Async (Response Body.Incoming) := + Response.Builder.blank builder end Std.Http.Response.Builder diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 976c90c8b4c0..9dac1ec8c835 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -4,237 +4,283 @@ open Std.Internal.IO Async open Std.Http open Std.Http.Body -/-! ## Stream tests -/ +/-! 
## Channel tests -/ --- Test send followed by recv returns the chunk -def streamSendRecv : Async Unit := do - let stream ← Stream.empty +-- Test send and recv on rendezvous channel + +def channelSendRecv : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel let chunk := Chunk.ofByteArray "hello".toUTF8 - stream.send chunk - let result ← stream.recv none + + let sendTask ← async (t := AsyncTask) <| outgoing.send chunk + let result ← incoming.recv none + assert! result.isSome assert! result.get!.data == "hello".toUTF8 + await sendTask + +#eval channelSendRecv.block -#eval streamSendRecv.block +-- Test tryRecv on empty channel returns none --- Test tryRecv on empty stream returns none -def streamTryRecvEmpty : Async Unit := do - let stream ← Stream.empty - let result ← stream.tryRecv +def channelTryRecvEmpty : Async Unit := do + let (_outgoing, incoming) ← Body.mkChannel + let result ← incoming.tryRecv assert! result.isNone -#eval streamTryRecvEmpty.block +#eval channelTryRecvEmpty.block + +-- Test tryRecv consumes a waiting producer + +def channelTryRecvWithPendingSend : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + + let sendTask ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "data".toUTF8) + let mut result := none + let mut fuel := 100 + while result.isNone && fuel > 0 do + result ← incoming.tryRecv + if result.isNone then + let _ ← Selectable.one #[ + .case (← Selector.sleep 1) pure + ] + fuel := fuel - 1 --- Test tryRecv returns data when available -def streamTryRecvWithData : Async Unit := do - let stream ← Stream.empty - stream.send (Chunk.ofByteArray "data".toUTF8) - let result ← stream.tryRecv assert! result.isSome assert! result.get!.data == "data".toUTF8 + await sendTask + +#eval channelTryRecvWithPendingSend.block -#eval streamTryRecvWithData.block +-- Test close sets closed flag --- Test close sets the closed flag -def streamClose : Async Unit := do - let stream ← Stream.empty - assert! 
!(← stream.isClosed) - stream.close - assert! (← stream.isClosed) +def channelClose : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + assert! !(← outgoing.isClosed) + outgoing.close + assert! (← incoming.isClosed) -#eval streamClose.block +#eval channelClose.block --- Test recv on closed stream returns none -def streamRecvAfterClose : Async Unit := do - let stream ← Stream.empty - stream.close - let result ← stream.recv none +-- Test recv on closed channel returns none + +def channelRecvAfterClose : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + outgoing.close + let result ← incoming.recv none assert! result.isNone -#eval streamRecvAfterClose.block - --- Test FIFO ordering of multiple chunks -def streamMultipleFIFO : Async Unit := do - let stream ← Stream.empty - stream.send (Chunk.ofByteArray "one".toUTF8) - stream.send (Chunk.ofByteArray "two".toUTF8) - stream.send (Chunk.ofByteArray "three".toUTF8) - let r1 ← stream.recv none - let r2 ← stream.recv none - let r3 ← stream.recv none - assert! r1.get!.data == "one".toUTF8 - assert! r2.get!.data == "two".toUTF8 - assert! r3.get!.data == "three".toUTF8 - -#eval streamMultipleFIFO.block - --- Test for-in iteration collects all chunks until close -def streamForIn : Async Unit := do - let stream ← Stream.empty - stream.send (Chunk.ofByteArray "a".toUTF8) - stream.send (Chunk.ofByteArray "b".toUTF8) - stream.close +#eval channelRecvAfterClose.block + +-- Test for-in iteration collects chunks until close + +def channelForIn : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + + let producer ← async (t := AsyncTask) <| do + outgoing.send (Chunk.ofByteArray "a".toUTF8) + outgoing.send (Chunk.ofByteArray "b".toUTF8) + outgoing.close let mut acc : ByteArray := .empty - for chunk in stream do + for chunk in incoming do acc := acc ++ chunk.data + assert! 
acc == "ab".toUTF8 + await producer -#eval streamForIn.block +#eval channelForIn.block --- Test chunks preserve extensions -def streamExtensions : Async Unit := do - let stream ← Stream.empty +-- Test chunk extensions are preserved + +def channelExtensions : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some "value")] : Chunk } - stream.send chunk - let result ← stream.recv none + + let sendTask ← async (t := AsyncTask) <| outgoing.send chunk + let result ← incoming.recv none + assert! result.isSome assert! result.get!.extensions.size == 1 assert! result.get!.extensions[0]! == (.mk "key", some "value") + await sendTask -#eval streamExtensions.block +#eval channelExtensions.block --- Test set/get known size -def streamKnownSize : Async Unit := do - let stream ← Stream.empty - stream.setKnownSize (some (.fixed 100)) - let size ← stream.getKnownSize +-- Test known size metadata + +def channelKnownSize : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + outgoing.setKnownSize (some (.fixed 100)) + let size ← incoming.getKnownSize assert! 
size == some (.fixed 100) -#eval streamKnownSize.block +#eval channelKnownSize.block + +-- Test known size decreases when a chunk is consumed + +def channelKnownSizeDecreases : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + outgoing.setKnownSize (some (.fixed 5)) + + let sendTask ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "hello".toUTF8) + let _ ← incoming.recv none + await sendTask --- Test capacity: filling up to capacity succeeds via tryRecv check -def streamCapacityFull : Async Unit := do - let stream ← Stream.emptyWithCapacity (capacity := 3) - stream.send (Chunk.ofByteArray "a".toUTF8) - stream.send (Chunk.ofByteArray "b".toUTF8) - stream.send (Chunk.ofByteArray "c".toUTF8) - -- All three should be buffered - let r1 ← stream.tryRecv - let r2 ← stream.tryRecv - let r3 ← stream.tryRecv - let r4 ← stream.tryRecv - assert! r1.get!.data == "a".toUTF8 - assert! r2.get!.data == "b".toUTF8 - assert! r3.get!.data == "c".toUTF8 - assert! r4.isNone + let size ← incoming.getKnownSize + assert! size == some (.fixed 0) -#eval streamCapacityFull.block +#eval channelKnownSizeDecreases.block --- Test capacity: send blocks when buffer is full and resumes after recv -def streamCapacityBackpressure : Async Unit := do - let stream ← Stream.emptyWithCapacity (capacity := 2) - stream.send (Chunk.ofByteArray "a".toUTF8) - stream.send (Chunk.ofByteArray "b".toUTF8) +-- Test only one blocked producer is allowed - -- Spawn a send that should block because capacity is 2 - let sendTask ← async (t := AsyncTask) <| - stream.send (Chunk.ofByteArray "c".toUTF8) +def channelSingleProducerRule : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel - -- Consume one to free space - let r1 ← stream.recv none - assert! 
r1.get!.data == "a".toUTF8 + let send1 ← async (t := AsyncTask) <| do + try + outgoing.send (Chunk.ofByteArray "one".toUTF8) + return true + catch _ => + return false - -- Wait for the blocked send to complete - sendTask.block + let send2 ← async (t := AsyncTask) <| do + try + outgoing.send (Chunk.ofByteArray "two".toUTF8) + return true + catch _ => + return false - -- Now we should be able to recv the remaining two - let r2 ← stream.recv none - let r3 ← stream.recv none - assert! r2.get!.data == "b".toUTF8 - assert! r3.get!.data == "c".toUTF8 + let first ← incoming.recv none + assert! first.isSome -#eval streamCapacityBackpressure.block + outgoing.close --- Test capacity 1: only one chunk at a time -def streamCapacityOne : Async Unit := do - let stream ← Stream.emptyWithCapacity (capacity := 1) - stream.send (Chunk.ofByteArray "first".toUTF8) + let ok1 ← await send1 + let ok2 ← await send2 - let sendTask ← async (t := AsyncTask) <| - stream.send (Chunk.ofByteArray "second".toUTF8) + assert! (ok1 && !ok2) || (!ok1 && ok2) - let r1 ← stream.recv none - assert! r1.get!.data == "first".toUTF8 +#eval channelSingleProducerRule.block - sendTask.block +-- Test only one blocked consumer is allowed - let r2 ← stream.recv none - assert! r2.get!.data == "second".toUTF8 +def channelSingleConsumerRule : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel -#eval streamCapacityOne.block + let recv1 ← async (t := AsyncTask) <| incoming.recv none --- Test close unblocks pending producers -def streamCloseUnblocksProducers : Async Unit := do - let stream ← Stream.emptyWithCapacity (capacity := 1) - stream.send (Chunk.ofByteArray "fill".toUTF8) + let hasInterest ← Selectable.one #[ + .case outgoing.interestSelector pure + ] + assert! 
hasInterest - -- This send should block because buffer is full - let sendTask ← async (t := AsyncTask) <| + let recv2Failed ← try - stream.send (Chunk.ofByteArray "blocked".toUTF8) + let _ ← incoming.recv none + pure false catch _ => - pure () + pure true + + assert! recv2Failed - -- Close should unblock the producer (send gets error internally) - stream.close + let sendTask ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "ok".toUTF8) + let r1 ← await recv1 + assert! r1.isSome + assert! r1.get!.data == "ok".toUTF8 await sendTask -#eval streamCloseUnblocksProducers.block +#eval channelSingleConsumerRule.block + +-- Test hasInterest reflects blocked receiver state + +def channelHasInterest : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + assert! !(← outgoing.hasInterest) + + let recvTask ← async (t := AsyncTask) <| incoming.recv none + + let hasInterest ← Selectable.one #[ + .case outgoing.interestSelector pure + ] + assert! hasInterest + assert! (← outgoing.hasInterest) + + let sendTask ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "x".toUTF8) + let _ ← await recvTask + await sendTask + + assert! !(← outgoing.hasInterest) + +#eval channelHasInterest.block + +-- Test interestSelector resolves false when channel closes first + +def channelInterestSelectorClose : Async Unit := do + let (outgoing, _incoming) ← Body.mkChannel + + let waitInterest ← async (t := AsyncTask) <| + Selectable.one #[ + .case outgoing.interestSelector pure + ] + + outgoing.close + let interested ← await waitInterest + assert! interested == false + +#eval channelInterestSelectorClose.block /-! ## Request.Builder body tests -/ -- Test Request.Builder.text sets correct headers + def requestBuilderText : Async Unit := do let req ← Request.post (.originForm! "/api") |>.text "Hello, World!" + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! req.head.headers.get? 
Header.Name.contentLength == some (Header.Value.ofString! "13") - let body ← req.body.tryRecv + + let body ← req.body.recv none assert! body.isSome assert! body.get!.data == "Hello, World!".toUTF8 #eval requestBuilderText.block -- Test Request.Builder.json sets correct headers + def requestBuilderJson : Async Unit := do let req ← Request.post (.originForm! "/api") |>.json "{\"key\": \"value\"}" + assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← req.body.tryRecv + let body ← req.body.recv none assert! body.isSome assert! body.get!.data == "{\"key\": \"value\"}".toUTF8 #eval requestBuilderJson.block --- Test Request.Builder.bytes sets correct headers -def requestBuilderBytes : Async Unit := do +-- Test Request.Builder.fromBytes sets content-length and body + +def requestBuilderFromBytes : Async Unit := do let data := ByteArray.mk #[0x01, 0x02, 0x03] let req ← Request.post (.originForm! "/api") - |>.bytes data - assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream") + |>.fromBytes data + assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3") - let body ← req.body.tryRecv + let body ← req.body.recv none assert! body.isSome assert! body.get!.data == data -#eval requestBuilderBytes.block - --- Test Request.Builder.html sets correct headers -def requestBuilderHtml : Async Unit := do - let req ← Request.post (.originForm! "/api") - |>.html "" - assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8") - let body ← req.body.tryRecv - assert! body.isSome - -#eval requestBuilderHtml.block +#eval requestBuilderFromBytes.block -- Test Request.Builder.noBody creates empty body + def requestBuilderNoBody : Async Unit := do let req ← Request.get (.originForm! "/api") |>.noBody + let body ← req.body.tryRecv assert! 
body.isNone @@ -243,31 +289,53 @@ def requestBuilderNoBody : Async Unit := do /-! ## Response.Builder body tests -/ -- Test Response.Builder.text sets correct headers + def responseBuilderText : Async Unit := do let res ← Response.ok |>.text "Hello, World!" + assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") - let body ← res.body.tryRecv + + let body ← res.body.recv none assert! body.isSome assert! body.get!.data == "Hello, World!".toUTF8 #eval responseBuilderText.block -- Test Response.Builder.json sets correct headers + def responseBuilderJson : Async Unit := do let res ← Response.ok |>.json "{\"status\": \"ok\"}" + assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← res.body.tryRecv + let body ← res.body.recv none assert! body.isSome + assert! body.get!.data == "{\"status\": \"ok\"}".toUTF8 #eval responseBuilderJson.block +-- Test Response.Builder.fromBytes sets content-length and body + +def responseBuilderFromBytes : Async Unit := do + let data := ByteArray.mk #[0xaa, 0xbb] + let res ← Response.ok + |>.fromBytes data + + assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "2") + let body ← res.body.recv none + assert! body.isSome + assert! body.get!.data == data + +#eval responseBuilderFromBytes.block + -- Test Response.Builder.noBody creates empty body + def responseBuilderNoBody : Async Unit := do let res ← Response.ok |>.noBody + let body ← res.body.tryRecv assert! 
body.isNone From df738acaa40f2d7a07b973c8fe11f0b69c343251 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 17 Feb 2026 01:55:26 -0300 Subject: [PATCH 31/44] fix: direction --- src/Std/Internal/Http/Data/Body/Stream.lean | 105 +++++++++++++------- tests/lean/run/async_http_body.lean | 22 ++-- 2 files changed, 81 insertions(+), 46 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 527808d4af78..32f8b71cb601 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -25,6 +25,9 @@ This module defines a zero-buffer rendezvous body channel split into two faces: - `Body.Outgoing`: producer side (send chunks) - `Body.Incoming`: consumer side (receive chunks) +Response/request builders produce `Body.Outgoing` because they only write body data. +Consumers and handlers receive `Body.Incoming` because they only read body data. + There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender. At most one blocked producer and one blocked consumer are supported. -/ @@ -106,6 +109,22 @@ structure Outgoing where private state : Mutex Channel.State deriving Nonempty, TypeName +/- Internal conversions between channel faces. +Use these only in HTTP internals where body direction must be adapted. -/ +namespace Internal + +/-- Reinterprets the receive-side handle as a send-side handle over the same channel. -/ +@[always_inline, inline] +def incomingToOutgoing (incoming : Incoming) : Outgoing := + { state := incoming.state } + +/-- Reinterprets the send-side handle as a receive-side handle over the same channel. -/ +@[always_inline, inline] +def outgoingToIncoming (outgoing : Outgoing) : Incoming := + { state := outgoing.state } + +end Internal + /-- Creates a rendezvous body channel. 
-/ def mkChannel : Async (Outgoing × Incoming) := do let state ← Mutex.new { @@ -425,34 +444,46 @@ def send (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do let res ← send' outgoing chunk await res -/-- Alias for `send`. -/ +/-- +Alias for `send`. +-/ def writeChunk (outgoing : Outgoing) (chunk : Chunk) : Async Unit := outgoing.send chunk -/-- Closes the channel. -/ +/-- +Closes the channel. +-/ def close (outgoing : Outgoing) : Async Unit := outgoing.state.atomically do Channel.close' -/-- Checks whether the channel is closed. -/ +/-- +Checks whether the channel is closed. +-/ @[always_inline, inline] def isClosed (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do return (← get).closed -/-- Returns true when a consumer is currently blocked waiting for data. -/ +/-- +Returns true when a consumer is currently blocked waiting for data. +-/ def hasInterest (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do Channel.pruneFinishedWaiters Channel.hasInterest' -/-- Gets the known size if available. -/ +/-- +Gets the known size if available. +-/ @[always_inline, inline] def getKnownSize (outgoing : Outgoing) : Async (Option Body.Length) := outgoing.state.atomically do return (← get).knownSize -/-- Sets known size metadata. -/ +/-- +Sets known size metadata. +-/ @[always_inline, inline] def setKnownSize (outgoing : Outgoing) (size : Option Body.Length) : Async Unit := outgoing.state.atomically do @@ -502,8 +533,8 @@ def interestSelector (outgoing : Outgoing) : Selector Bool where end Outgoing /-- -Creates an incoming body from a producer function. -This is one of the high-level body constructors intended for builders. +Creates a body from a producer function. +Returns the receive-side handle; the producer writes via `Outgoing`. 
-/ def stream (gen : Outgoing → Async Unit) : Async Incoming := do let (outgoing, incoming) ← mkChannel @@ -511,8 +542,7 @@ def stream (gen : Outgoing → Async Unit) : Async Incoming := do return incoming /-- -Creates an incoming body from a fixed byte array. -This is one of the high-level body constructors intended for builders. +Creates a body from a fixed byte array. -/ def fromBytes (content : ByteArray) : Async Incoming := do let (outgoing, incoming) ← mkChannel @@ -523,8 +553,7 @@ def fromBytes (content : ByteArray) : Async Incoming := do return incoming /-- -Creates an empty incoming body. -This is one of the high-level body constructors intended for builders. +Creates an empty body. -/ def empty : Async Incoming := do let (outgoing, incoming) ← mkChannel @@ -555,39 +584,39 @@ Builds a request with a streaming body generator. def stream (builder : Request.Builder) (gen : Body.Outgoing → Async Unit) : - Async (Request Body.Incoming) := do + Async (Request Body.Outgoing) := do let incoming ← Body.stream gen - return Request.Builder.body builder incoming + return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) -private def emptyBody (builder : Request.Builder) : Async (Request Body.Incoming) := do +private def emptyBody (builder : Request.Builder) : Async (Request Body.Outgoing) := do let incoming ← Body.empty let builder := withContentLength builder 0 - return Request.Builder.body builder incoming + return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) /-- Builds a request with an empty body. 
-/ -def blank (builder : Request.Builder) : Async (Request Body.Incoming) := +def blank (builder : Request.Builder) : Async (Request Body.Outgoing) := emptyBody builder private def fromBytesCore (builder : Request.Builder) (content : ByteArray) : - Async (Request Body.Incoming) := do + Async (Request Body.Outgoing) := do let incoming ← Body.fromBytes content let builder := withContentLength builder content.size - return Request.Builder.body builder incoming + return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) /-- Builds a request from raw bytes. -/ -def fromBytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Incoming) := +def fromBytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Outgoing) := fromBytesCore builder content /-- Builds a request with a binary body. -/ -def bytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Incoming) := do +def bytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Outgoing) := do let builder := Request.Builder.header builder Header.Name.contentType @@ -597,7 +626,7 @@ def bytes (builder : Request.Builder) (content : ByteArray) : Async (Request Bod /-- Builds a request with a text body. -/ -def text (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do +def text (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do let builder := Request.Builder.header builder Header.Name.contentType @@ -607,7 +636,7 @@ def text (builder : Request.Builder) (content : String) : Async (Request Body.In /-- Builds a request with a JSON body. 
-/ -def json (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do +def json (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do let builder := Request.Builder.header builder Header.Name.contentType @@ -617,7 +646,7 @@ def json (builder : Request.Builder) (content : String) : Async (Request Body.In /-- Builds a request with an HTML body. -/ -def html (builder : Request.Builder) (content : String) : Async (Request Body.Incoming) := do +def html (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do let builder := Request.Builder.header builder Header.Name.contentType @@ -627,7 +656,7 @@ def html (builder : Request.Builder) (content : String) : Async (Request Body.In /-- Builds a request with no body. -/ -def noBody (builder : Request.Builder) : Async (Request Body.Incoming) := +def noBody (builder : Request.Builder) : Async (Request Body.Outgoing) := Request.Builder.blank builder end Std.Http.Request.Builder @@ -647,39 +676,39 @@ Builds a response with a streaming body generator. def stream (builder : Response.Builder) (gen : Body.Outgoing → Async Unit) : - Async (Response Body.Incoming) := do + Async (Response Body.Outgoing) := do let incoming ← Body.stream gen - return Response.Builder.body builder incoming + return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) -private def emptyBody (builder : Response.Builder) : Async (Response Body.Incoming) := do +private def emptyBody (builder : Response.Builder) : Async (Response Body.Outgoing) := do let incoming ← Body.empty let builder := withContentLength builder 0 - return Response.Builder.body builder incoming + return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) /-- Builds a response with an empty body. 
-/ -def blank (builder : Response.Builder) : Async (Response Body.Incoming) := +def blank (builder : Response.Builder) : Async (Response Body.Outgoing) := emptyBody builder private def fromBytesCore (builder : Response.Builder) (content : ByteArray) : - Async (Response Body.Incoming) := do + Async (Response Body.Outgoing) := do let incoming ← Body.fromBytes content let builder := withContentLength builder content.size - return Response.Builder.body builder incoming + return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) /-- Builds a response from raw bytes. -/ -def fromBytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Incoming) := +def fromBytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Outgoing) := fromBytesCore builder content /-- Builds a response with a binary body. -/ -def bytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Incoming) := do +def bytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Outgoing) := do let builder := Response.Builder.header builder Header.Name.contentType @@ -689,7 +718,7 @@ def bytes (builder : Response.Builder) (content : ByteArray) : Async (Response B /-- Builds a response with a text body. -/ -def text (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do +def text (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do let builder := Response.Builder.header builder Header.Name.contentType @@ -699,7 +728,7 @@ def text (builder : Response.Builder) (content : String) : Async (Response Body. /-- Builds a response with a JSON body. 
-/ -def json (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do +def json (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do let builder := Response.Builder.header builder Header.Name.contentType @@ -709,7 +738,7 @@ def json (builder : Response.Builder) (content : String) : Async (Response Body. /-- Builds a response with an HTML body. -/ -def html (builder : Response.Builder) (content : String) : Async (Response Body.Incoming) := do +def html (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do let builder := Response.Builder.header builder Header.Name.contentType @@ -719,7 +748,7 @@ def html (builder : Response.Builder) (content : String) : Async (Response Body. /-- Builds a response with no body. -/ -def noBody (builder : Response.Builder) : Async (Response Body.Incoming) := +def noBody (builder : Response.Builder) : Async (Response Body.Outgoing) := Response.Builder.blank builder end Std.Http.Response.Builder diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 9dac1ec8c835..12aa23e627d2 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -233,6 +233,12 @@ def channelInterestSelectorClose : Async Unit := do /-! ## Request.Builder body tests -/ +private def recvBuiltBody (body : Body.Outgoing) : Async (Option Chunk) := + (Body.Internal.outgoingToIncoming body).recv none + +private def tryRecvBuiltBody (body : Body.Outgoing) : Async (Option Chunk) := + (Body.Internal.outgoingToIncoming body).tryRecv + -- Test Request.Builder.text sets correct headers def requestBuilderText : Async Unit := do @@ -242,7 +248,7 @@ def requestBuilderText : Async Unit := do assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! 
"13") - let body ← req.body.recv none + let body ← recvBuiltBody req.body assert! body.isSome assert! body.get!.data == "Hello, World!".toUTF8 @@ -255,7 +261,7 @@ def requestBuilderJson : Async Unit := do |>.json "{\"key\": \"value\"}" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← req.body.recv none + let body ← recvBuiltBody req.body assert! body.isSome assert! body.get!.data == "{\"key\": \"value\"}".toUTF8 @@ -269,7 +275,7 @@ def requestBuilderFromBytes : Async Unit := do |>.fromBytes data assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3") - let body ← req.body.recv none + let body ← recvBuiltBody req.body assert! body.isSome assert! body.get!.data == data @@ -281,7 +287,7 @@ def requestBuilderNoBody : Async Unit := do let req ← Request.get (.originForm! "/api") |>.noBody - let body ← req.body.tryRecv + let body ← tryRecvBuiltBody req.body assert! body.isNone #eval requestBuilderNoBody.block @@ -297,7 +303,7 @@ def responseBuilderText : Async Unit := do assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") - let body ← res.body.recv none + let body ← recvBuiltBody res.body assert! body.isSome assert! body.get!.data == "Hello, World!".toUTF8 @@ -310,7 +316,7 @@ def responseBuilderJson : Async Unit := do |>.json "{\"status\": \"ok\"}" assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") - let body ← res.body.recv none + let body ← recvBuiltBody res.body assert! body.isSome assert! body.get!.data == "{\"status\": \"ok\"}".toUTF8 @@ -324,7 +330,7 @@ def responseBuilderFromBytes : Async Unit := do |>.fromBytes data assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! 
"2") - let body ← res.body.recv none + let body ← recvBuiltBody res.body assert! body.isSome assert! body.get!.data == data @@ -336,7 +342,7 @@ def responseBuilderNoBody : Async Unit := do let res ← Response.ok |>.noBody - let body ← res.body.tryRecv + let body ← tryRecvBuiltBody res.body assert! body.isNone #eval responseBuilderNoBody.block From 6821bb82db88fce06a30f8a5176c416e6928640b Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 17 Feb 2026 21:25:06 -0300 Subject: [PATCH 32/44] feat: close after generate --- src/Std/Internal/Http/Data/Body/Stream.lean | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 32f8b71cb601..28b210a8c4d8 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -538,7 +538,7 @@ Returns the receive-side handle; the producer writes via `Outgoing`. -/ def stream (gen : Outgoing → Async Unit) : Async Incoming := do let (outgoing, incoming) ← mkChannel - background (gen outgoing) + try gen outgoing finally outgoing.close return incoming /-- @@ -547,9 +547,8 @@ Creates a body from a fixed byte array. 
def fromBytes (content : ByteArray) : Async Incoming := do let (outgoing, incoming) ← mkChannel outgoing.setKnownSize (some (.fixed content.size)) - background do - outgoing.send (Chunk.ofByteArray content) - outgoing.close + outgoing.send (Chunk.ofByteArray content) + outgoing.close return incoming /-- From 193bbddb4ee180f382de7d25d4fee1efad69b1aa Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 17 Feb 2026 23:37:16 -0300 Subject: [PATCH 33/44] feat: incomlpete chunks --- src/Std/Internal/Http/Data/Body/Stream.lean | 79 +++++++++++++++++++-- 1 file changed, 72 insertions(+), 7 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 28b210a8c4d8..dc45d5032a1b 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -93,6 +93,18 @@ private structure State where Known size of the stream if available -/ knownSize : Option Body.Length + + /-- + Buffered partial chunk data accumulated from `Outgoing.send ... (incomplete := true)`. + These partial pieces are collapsed and emitted as a single chunk on the next complete send. + -/ + pendingIncompleteChunk : Option Chunk := none + + /-- + Optional background producer task used by `Body.stream`. + Keeping this handle alive prevents the detached producer from being dropped early. 
+ -/ + backgroundTask : Option (AsyncTask Unit) := none deriving Nonempty end Channel @@ -133,6 +145,7 @@ def mkChannel : Async (Outgoing × Incoming) := do interestWaiter := none closed := false knownSize := none + backgroundTask := none } return ({ state }, { state }) @@ -143,6 +156,12 @@ private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size)) | _ => knownSize +private def mergeChunks (base : Chunk) (next : Chunk) : Chunk := + { + data := base.data ++ next.data + extensions := if base.extensions.isEmpty then next.extensions else base.extensions + } + private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Unit := do let st ← get @@ -220,6 +239,7 @@ private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO pendingProducer := none pendingConsumer := none interestWaiter := none + pendingIncompleteChunk := none closed := true } @@ -402,6 +422,28 @@ end Incoming namespace Outgoing +private def collapseForSend + (outgoing : Outgoing) + (chunk : Chunk) + (incomplete : Bool) : BaseIO (Except IO.Error (Option Chunk)) := do + outgoing.state.atomically do + Channel.pruneFinishedWaiters + let st ← get + + if st.closed then + return .error (.userError "channel closed") + + let merged := match st.pendingIncompleteChunk with + | some pending => Channel.mergeChunks pending chunk + | none => chunk + + if incomplete then + set { st with pendingIncompleteChunk := some merged } + return .ok none + else + set { st with pendingIncompleteChunk := none } + return .ok (some merged) + private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit) := do outgoing.state.atomically do Channel.pruneFinishedWaiters @@ -435,14 +477,25 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit return Task.pure (.error (IO.Error.userError "unreachable")) /-- -Sends a chunk. 
Blocks until a receiver is waiting. +Sends a chunk. + +If `incomplete := true`, the chunk is buffered and collapsed with subsequent chunks, and is not +delivered to the receiver yet. +If `incomplete := false`, any buffered incomplete pieces are collapsed with this chunk and the +single merged chunk is delivered (blocking until a receiver is waiting). -/ -def send (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do - if chunk.data.isEmpty then +def send (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do + if chunk.data.isEmpty ∧ chunk.extensions.isEmpty then return - let res ← send' outgoing chunk - await res + match (← collapseForSend outgoing chunk incomplete) with + | .error err => + throw err + | .ok none => + pure () + | .ok (some toSend) => + let res ← send' outgoing toSend + await res /-- Alias for `send`. @@ -534,11 +587,23 @@ end Outgoing /-- Creates a body from a producer function. -Returns the receive-side handle; the producer writes via `Outgoing`. +Returns the receive-side handle immediately and runs `gen` in a detached task. +The channel is always closed when `gen` returns or throws. +Errors from `gen` are not rethrown here; consumers observe end-of-stream via `recv = none`. +The detached task handle is retained in channel state for the channel lifetime. 
-/ def stream (gen : Outgoing → Async Unit) : Async Incoming := do let (outgoing, incoming) ← mkChannel - try gen outgoing finally outgoing.close + let task ← async (t := AsyncTask) <| do + try + gen outgoing + finally + outgoing.close + + incoming.state.atomically do + let st ← get + set { st with backgroundTask := some task } + return incoming /-- From 36465905061c8fa89f969a0a4710bd05afa82363 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 18 Feb 2026 08:43:33 -0300 Subject: [PATCH 34/44] feat: queue --- src/Std/Internal/Http/Data/Body/Stream.lean | 99 +++++++++++++++------ 1 file changed, 70 insertions(+), 29 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index dc45d5032a1b..a14342f5610d 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -20,7 +20,7 @@ public section /-! # Body Channels -This module defines a zero-buffer rendezvous body channel split into two faces: +This module defines a body channel split into two faces: - `Body.Outgoing`: producer side (send chunks) - `Body.Incoming`: consumer side (receive chunks) @@ -28,8 +28,10 @@ This module defines a zero-buffer rendezvous body channel split into two faces: Response/request builders produce `Body.Outgoing` because they only write body data. Consumers and handlers receive `Body.Incoming` because they only read body data. -There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender. -At most one blocked producer and one blocked consumer are supported. +The channel supports an internal FIFO queue for pre-buffered chunks. +Sends enqueue chunks while buffer capacity is available; when full, one producer may block until +space is freed. At most one blocked producer, one blocked consumer, and one blocked +interest-selector are supported. 
-/ namespace Std.Http.Body @@ -70,9 +72,24 @@ private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Boo private structure State where /-- - A single blocked producer waiting for a receiver + Single blocked producer slot used when the internal queue is full. -/ - pendingProducer : Option Producer + pendingProducer : Option Producer := none + + /-- + FIFO chunks waiting to be consumed. + -/ + queuedChunks : Std.Queue Chunk := ∅ + + /-- + Number of chunks currently buffered in `queuedChunks`. + -/ + queuedSize : Nat := 0 + + /-- + Maximum number of buffered chunks allowed before producers block. + -/ + capacity : Nat /-- A single blocked consumer waiting for a producer @@ -99,12 +116,6 @@ private structure State where These partial pieces are collapsed and emitted as a single chunk on the next complete send. -/ pendingIncompleteChunk : Option Chunk := none - - /-- - Optional background producer task used by `Body.stream`. - Keeping this handle alive prevents the detached producer from being dropped early. - -/ - backgroundTask : Option (AsyncTask Unit) := none deriving Nonempty end Channel @@ -137,15 +148,18 @@ def outgoingToIncoming (outgoing : Outgoing) : Incoming := end Internal -/-- Creates a rendezvous body channel. -/ -def mkChannel : Async (Outgoing × Incoming) := do +/-- Creates a queue-backed body channel. 
-/ +def mkChannel (capacity : Nat := 64) : Async (Outgoing × Incoming) := do + let capacity := capacity.max 1 let state ← Mutex.new { pendingProducer := none + queuedChunks := ∅ + queuedSize := 0 + capacity := capacity pendingConsumer := none interestWaiter := none closed := false knownSize := none - backgroundTask := none } return ({ state }, { state }) @@ -198,7 +212,7 @@ private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLift private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Bool := do let st ← get - return st.pendingProducer.isSome || st.closed + return st.queuedSize > 0 || st.pendingProducer.isSome || st.closed private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Bool := do @@ -208,7 +222,26 @@ private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : AtomicT State m (Option Chunk) := do let st ← get - if let some producer := st.pendingProducer then + if let some (chunk, queuedChunks) := st.queuedChunks.dequeue? then + let mut next := { + st with + queuedChunks + queuedSize := st.queuedSize - 1 + knownSize := decreaseKnownSize st.knownSize chunk + } + + if let some producer := st.pendingProducer then + producer.promise.resolve true + next := { + next with + pendingProducer := none + queuedChunks := next.queuedChunks.enqueue producer.chunk + queuedSize := next.queuedSize + 1 + } + + set next + return some chunk + else if let some producer := st.pendingProducer then producer.promise.resolve true set { st with @@ -249,7 +282,7 @@ namespace Incoming /-- Attempts to receive a chunk from the channel without blocking. -Returns `some chunk` only when a producer is already waiting. +Returns `some chunk` only when data is already queued. 
-/ def tryRecv (incoming : Incoming) : Async (Option Chunk) := incoming.state.atomically do @@ -278,8 +311,8 @@ private def recv' (incoming : Incoming) : BaseIO (AsyncTask (Option Chunk)) := d | some res => .ok res /-- -Receives a chunk from the channel. Blocks until a producer sends one. -Returns `none` if the channel is closed and no producer is waiting. +Receives a chunk from the channel. Blocks until data is available or the channel closes. +Returns `none` if the channel is closed and no queued data remains. -/ def recv (incoming : Incoming) (_count : Option UInt64) : Async (Option Chunk) := do Async.ofAsyncTask (← recv' incoming) @@ -317,7 +350,7 @@ def setKnownSize (incoming : Incoming) (size : Option Body.Length) : Async Unit open Internal.IO.Async in /-- -Creates a selector that resolves when a producer is waiting (or the channel closes). +Creates a selector that resolves when queued data is available (or the channel closes). -/ def recvSelector (incoming : Incoming) : Selector (Option Chunk) where tryFn := do @@ -464,6 +497,13 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit return AsyncTask.pure () else set { st with pendingConsumer := none } + else if st.queuedSize < st.capacity then + set { + st with + queuedChunks := st.queuedChunks.enqueue chunk + queuedSize := st.queuedSize + 1 + } + return AsyncTask.pure () else if st.pendingProducer.isSome then return Task.pure (.error (IO.Error.userError "only one blocked producer is allowed")) @@ -482,7 +522,7 @@ Sends a chunk. If `incomplete := true`, the chunk is buffered and collapsed with subsequent chunks, and is not delivered to the receiver yet. If `incomplete := false`, any buffered incomplete pieces are collapsed with this chunk and the -single merged chunk is delivered (blocking until a receiver is waiting). +single merged chunk is sent. 
-/ def send (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do if chunk.data.isEmpty ∧ chunk.extensions.isEmpty then @@ -590,20 +630,14 @@ Creates a body from a producer function. Returns the receive-side handle immediately and runs `gen` in a detached task. The channel is always closed when `gen` returns or throws. Errors from `gen` are not rethrown here; consumers observe end-of-stream via `recv = none`. -The detached task handle is retained in channel state for the channel lifetime. -/ def stream (gen : Outgoing → Async Unit) : Async Incoming := do let (outgoing, incoming) ← mkChannel - let task ← async (t := AsyncTask) <| do + background <| do try gen outgoing finally outgoing.close - - incoming.state.atomically do - let st ← get - set { st with backgroundTask := some task } - return incoming /-- @@ -612,7 +646,14 @@ Creates a body from a fixed byte array. def fromBytes (content : ByteArray) : Async Incoming := do let (outgoing, incoming) ← mkChannel outgoing.setKnownSize (some (.fixed content.size)) - outgoing.send (Chunk.ofByteArray content) + if content.size > 0 then + outgoing.state.atomically do + let st ← get + set { + st with + queuedChunks := st.queuedChunks.enqueue (Chunk.ofByteArray content) + queuedSize := st.queuedSize + 1 + } outgoing.close return incoming From a89a69e7da8d0567a0d5c00b2caea5dd7281d290 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Wed, 18 Feb 2026 08:52:59 -0300 Subject: [PATCH 35/44] fix: queue test --- tests/lean/run/async_http_body.lean | 50 ++++++++++++++++++++++------- 1 file changed, 38 insertions(+), 12 deletions(-) diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 12aa23e627d2..5d7fef284ecc 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -6,7 +6,7 @@ open Std.Http.Body /-! 
## Channel tests -/ --- Test send and recv on rendezvous channel +-- Test send and recv on channel def channelSendRecv : Async Unit := do let (outgoing, incoming) ← Body.mkChannel @@ -21,6 +21,24 @@ def channelSendRecv : Async Unit := do #eval channelSendRecv.block +-- Test sends are buffered when no consumer is waiting + +def channelBufferedSends : Async Unit := do + let (outgoing, incoming) ← Body.mkChannel + + outgoing.send (Chunk.ofByteArray "one".toUTF8) + outgoing.send (Chunk.ofByteArray "two".toUTF8) + + let first ← incoming.recv none + let second ← incoming.recv none + + assert! first.isSome + assert! second.isSome + assert! first.get!.data == "one".toUTF8 + assert! second.get!.data == "two".toUTF8 + +#eval channelBufferedSends.block + -- Test tryRecv on empty channel returns none def channelTryRecvEmpty : Async Unit := do @@ -135,31 +153,39 @@ def channelKnownSizeDecreases : Async Unit := do -- Test only one blocked producer is allowed def channelSingleProducerRule : Async Unit := do - let (outgoing, incoming) ← Body.mkChannel + let (outgoing, incoming) ← Body.mkChannel (capacity := 1) + outgoing.send (Chunk.ofByteArray "one".toUTF8) - let send1 ← async (t := AsyncTask) <| do + let send2 ← async (t := AsyncTask) <| do try - outgoing.send (Chunk.ofByteArray "one".toUTF8) + outgoing.send (Chunk.ofByteArray "two".toUTF8) return true catch _ => return false - let send2 ← async (t := AsyncTask) <| do + -- Yield so `send2` can occupy the single blocked-producer slot. + let _ ← Selectable.one #[ + .case (← Selector.sleep 5) pure + ] + + let send3Failed ← try - outgoing.send (Chunk.ofByteArray "two".toUTF8) - return true + outgoing.send (Chunk.ofByteArray "three".toUTF8) + pure false catch _ => - return false + pure true + assert! send3Failed let first ← incoming.recv none assert! first.isSome + assert! first.get!.data == "one".toUTF8 - outgoing.close - - let ok1 ← await send1 let ok2 ← await send2 + assert! ok2 - assert! 
(ok1 && !ok2) || (!ok1 && ok2) + let second ← incoming.recv none + assert! second.isSome + assert! second.get!.data == "two".toUTF8 #eval channelSingleProducerRule.block From 42800e4037f91d2ea7721d49c9414408cd23296a Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 20 Feb 2026 11:37:35 -0300 Subject: [PATCH 36/44] feat: body type class --- src/Std/Internal/Http/Data/Body.lean | 4 + src/Std/Internal/Http/Data/Body/Empty.lean | 56 +++++ src/Std/Internal/Http/Data/Body/Full.lean | 236 ++++++++++++++++++++ src/Std/Internal/Http/Data/Body/Reader.lean | 57 +++++ src/Std/Internal/Http/Data/Body/Stream.lean | 163 +------------- src/Std/Internal/Http/Data/Body/Writer.lean | 179 +++++++++++++++ 6 files changed, 537 insertions(+), 158 deletions(-) create mode 100644 src/Std/Internal/Http/Data/Body/Empty.lean create mode 100644 src/Std/Internal/Http/Data/Body/Full.lean create mode 100644 src/Std/Internal/Http/Data/Body/Reader.lean create mode 100644 src/Std/Internal/Http/Data/Body/Writer.lean diff --git a/src/Std/Internal/Http/Data/Body.lean b/src/Std/Internal/Http/Data/Body.lean index a5c889877bf1..31a0524d0541 100644 --- a/src/Std/Internal/Http/Data/Body.lean +++ b/src/Std/Internal/Http/Data/Body.lean @@ -10,4 +10,8 @@ public import Std.Internal.Async.ContextAsync public import Std.Internal.Http.Data.Headers public import Std.Internal.Http.Data.Body.Basic public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Body.Reader +public import Std.Internal.Http.Data.Body.Writer public import Std.Internal.Http.Data.Body.Stream +public import Std.Internal.Http.Data.Body.Empty +public import Std.Internal.Http.Data.Body.Full diff --git a/src/Std/Internal/Http/Data/Body/Empty.lean b/src/Std/Internal/Http/Data/Body/Empty.lean new file mode 100644 index 000000000000..586229d9e6d2 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Empty.lean @@ -0,0 +1,56 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. 
+Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async +public import Std.Internal.Http.Data.Request +public import Std.Internal.Http.Data.Response +public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Chunk + +public section + +/-! +# Body.Empty + +Represents an always-empty, already-closed body handle. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +An empty body handle. +-/ +structure Empty where +deriving Inhabited + +end Std.Http.Body + +namespace Std.Http.Request.Builder +open Internal.IO.Async + +/-- +Builds a request with an empty body. +-/ +def blank (builder : Builder) : Async (Request Body.Empty) := + pure <| builder.body {} + +end Std.Http.Request.Builder + +namespace Std.Http.Response.Builder +open Internal.IO.Async + +/-- +Builds a response with an empty body. +-/ +def blank (builder : Builder) : Async (Response Body.Empty) := + pure <| builder.body {} + +end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean new file mode 100644 index 000000000000..589549cb07cf --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -0,0 +1,236 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Sync +public import Std.Internal.Async +public import Std.Internal.Http.Data.Request +public import Std.Internal.Http.Data.Response +public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Chunk +public import Init.Data.ByteArray + +public section + +/-! +# Body.Full + +A body backed by a fixed `ByteArray` held in a `Mutex`. 
+ +The byte array is consumed at most once: the first call to `recv` or `tryRecv` atomically +takes the data and returns it as a single chunk; subsequent calls return `none` (end-of-stream). +Closing the body discards any unconsumed data. + +`Full` implements `Body.Writer`. The `Writer` instance is a no-op for sends since the content is +fixed at construction; it is provided so that `Full` can substitute for a streaming channel in +contexts that require a writable body handle. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +A body backed by a fixed, mutex-protected `ByteArray`. + +The data is consumed on the first read. Once consumed (or explicitly closed), the body +behaves as a closed, empty channel. +-/ +structure Full where + private mk :: + private state : Mutex (Option ByteArray) +deriving Nonempty + +namespace Full + +/-- +Creates a `Full` body from a `ByteArray`. +-/ +def ofByteArray (data : ByteArray) : Async Full := do + let state ← Mutex.new (some data) + return { state } + +/-- +Creates a `Full` body from a `String`. +-/ +def ofUTF8String (data : String) : Async Full := do + let state ← Mutex.new (some data.toUTF8) + return { state } + +/-- +Atomically takes the byte array and returns it as a chunk. +Returns `none` if the data has already been consumed or the body is closed. +-/ +def tryRecv (full : Full) : Async (Option Chunk) := + full.state.atomically do + match ← get with + | none => return none + | some data => + set (none : Option ByteArray) + if data.isEmpty then return none + return some (Chunk.ofByteArray data) + +/-- +Receives the body data. Returns the full byte array on the first call as a single chunk, +then `none` on all subsequent calls. + +The `count` hint is ignored; the entire content is always returned in one chunk. +-/ +def recv (full : Full) (_count : Option UInt64) : Async (Option Chunk) := + full.tryRecv + +/-- +No-op send for a fixed full body. 
+-/ +@[inline] +def send (_ : Full) (_ : Chunk) (_incomplete : Bool := false) : Async Unit := + pure () + +/-- +Closes the body, discarding any unconsumed data. +-/ +def close (full : Full) : Async Unit := + full.state.atomically do + set (none : Option ByteArray) + +/-- +Returns `true` when the data has been consumed or the body has been closed. +-/ +def isClosed (full : Full) : Async Bool := + full.state.atomically do + return (← get).isNone + +/-- +A fixed full body never has consumer interest. +-/ +@[inline] +def hasInterest (_ : Full) : Async Bool := + pure false + +/-- +Returns known-size metadata based on current remaining bytes. +-/ +def getKnownSize (full : Full) : Async (Option Body.Length) := + full.state.atomically do + match ← get with + | none => pure (some (.fixed 0)) + | some data => pure (some (.fixed data.size)) + +/-- +No-op metadata setter for a fixed full body. +-/ +@[inline] +def setKnownSize (_ : Full) (_ : Option Body.Length) : Async Unit := + pure () + +open Internal.IO.Async in +/-- +Selector that immediately resolves to `false` for interest. +-/ +def interestSelector (_ : Full) : Selector Bool where + tryFn := pure (some false) + registerFn waiter := do + let lose := pure () + let win promise := do + promise.resolve (.ok false) + waiter.race lose win + unregisterFn := pure () + +end Full + +end Std.Http.Body + +namespace Std.Http.Request.Builder +open Internal.IO.Async + +private def fromBytesCore + (builder : Builder) + (content : ByteArray) : + Async (Request Body.Full) := do + return builder.body (← Body.Full.ofByteArray content) + +/-- +Builds a request from raw bytes. +-/ +def fromBytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := + fromBytesCore builder content + +/-- +Builds a request with a binary body. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! 
"application/octet-stream") + fromBytesCore builder content + +/-- +Builds a request with a text body. +-/ +def text (builder : Builder) (content : String) : Async (Request Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + fromBytesCore builder content.toUTF8 + +/-- +Builds a request with a JSON body. +-/ +def json (builder : Builder) (content : String) : Async (Request Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/json") + fromBytesCore builder content.toUTF8 + +/-- +Builds a request with an HTML body. +-/ +def html (builder : Builder) (content : String) : Async (Request Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + fromBytesCore builder content.toUTF8 + +end Std.Http.Request.Builder + +namespace Std.Http.Response.Builder +open Internal.IO.Async + +private def fromBytesCore + (builder : Builder) + (content : ByteArray) : + Async (Response Body.Full) := do + return builder.body (← Body.Full.ofByteArray content) + +/-- +Builds a response from raw bytes. +-/ +def fromBytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := + fromBytesCore builder content + +/-- +Builds a response with a binary body. +-/ +def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream") + fromBytesCore builder content + +/-- +Builds a response with a text body. +-/ +def text (builder : Builder) (content : String) : Async (Response Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8") + fromBytesCore builder content.toUTF8 + +/-- +Builds a response with a JSON body. 
+-/ +def json (builder : Builder) (content : String) : Async (Response Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/json") + fromBytesCore builder content.toUTF8 + +/-- +Builds a response with an HTML body. +-/ +def html (builder : Builder) (content : String) : Async (Response Body.Full) := do + let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8") + fromBytesCore builder content.toUTF8 + +end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Data/Body/Reader.lean b/src/Std/Internal/Http/Data/Body/Reader.lean new file mode 100644 index 000000000000..b38bb5bcc6cc --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Reader.lean @@ -0,0 +1,57 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async +public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Body.Basic +public import Std.Internal.Http.Data.Body.Stream + +public section + +/-! +# Body.Reader + +Reader typeclass for body-like values that can be consumed as chunk streams. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +Typeclass for values that can be read as HTTP body streams. +-/ +class Reader (α : Type) where + /-- + Receives the next body chunk. Returns `none` at end-of-stream. + -/ + recv : α → Option UInt64 → Async (Option Chunk) + + /-- + Closes the reader stream. + -/ + close : α → Async Unit + + /-- + Returns `true` when the reader stream is closed. + -/ + isClosed : α → Async Bool + + /-- + Selector that resolves when a chunk is available or EOF is reached. 
+ -/ + recvSelector : α → Selector (Option Chunk) + +instance : Reader Incoming where + recv := Incoming.recv + close := Incoming.close + isClosed := Incoming.isClosed + recvSelector := Incoming.recvSelector + +end Std.Http.Body diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index a14342f5610d..9c093b81c926 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -8,6 +8,7 @@ module prelude public import Std.Sync public import Std.Internal.Async +public import Std.Internal.Http.Data.Extensions public import Std.Internal.Http.Data.Request public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Chunk @@ -677,183 +678,29 @@ end Std.Http.Body namespace Std.Http.Request.Builder open Internal.IO.Async -private def withContentLength - (builder : Request.Builder) - (size : Nat) : - Request.Builder := - Request.Builder.header builder Header.Name.contentLength (Header.Value.ofString! (toString size)) - /-- Builds a request with a streaming body generator. -/ def stream - (builder : Request.Builder) + (builder : Builder) (gen : Body.Outgoing → Async Unit) : Async (Request Body.Outgoing) := do let incoming ← Body.stream gen - return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -private def emptyBody (builder : Request.Builder) : Async (Request Body.Outgoing) := do - let incoming ← Body.empty - let builder := withContentLength builder 0 - return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -/-- -Builds a request with an empty body. 
--/ -def blank (builder : Request.Builder) : Async (Request Body.Outgoing) := - emptyBody builder - -private def fromBytesCore - (builder : Request.Builder) - (content : ByteArray) : - Async (Request Body.Outgoing) := do - let incoming ← Body.fromBytes content - let builder := withContentLength builder content.size - return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -/-- -Builds a request from raw bytes. --/ -def fromBytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Outgoing) := - fromBytesCore builder content - -/-- -Builds a request with a binary body. --/ -def bytes (builder : Request.Builder) (content : ByteArray) : Async (Request Body.Outgoing) := do - let builder := Request.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "application/octet-stream") - fromBytesCore builder content - -/-- -Builds a request with a text body. --/ -def text (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do - let builder := Request.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "text/plain; charset=utf-8") - fromBytesCore builder content.toUTF8 - -/-- -Builds a request with a JSON body. --/ -def json (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do - let builder := Request.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "application/json") - fromBytesCore builder content.toUTF8 - -/-- -Builds a request with an HTML body. --/ -def html (builder : Request.Builder) (content : String) : Async (Request Body.Outgoing) := do - let builder := Request.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "text/html; charset=utf-8") - fromBytesCore builder content.toUTF8 - -/-- -Builds a request with no body. 
--/ -def noBody (builder : Request.Builder) : Async (Request Body.Outgoing) := - Request.Builder.blank builder + return builder.body (Body.Internal.incomingToOutgoing incoming) end Std.Http.Request.Builder namespace Std.Http.Response.Builder open Internal.IO.Async -private def withContentLength - (builder : Response.Builder) - (size : Nat) : - Response.Builder := - Response.Builder.header builder Header.Name.contentLength (Header.Value.ofString! (toString size)) - /-- Builds a response with a streaming body generator. -/ def stream - (builder : Response.Builder) + (builder : Builder) (gen : Body.Outgoing → Async Unit) : Async (Response Body.Outgoing) := do let incoming ← Body.stream gen - return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -private def emptyBody (builder : Response.Builder) : Async (Response Body.Outgoing) := do - let incoming ← Body.empty - let builder := withContentLength builder 0 - return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -/-- -Builds a response with an empty body. --/ -def blank (builder : Response.Builder) : Async (Response Body.Outgoing) := - emptyBody builder - -private def fromBytesCore - (builder : Response.Builder) - (content : ByteArray) : - Async (Response Body.Outgoing) := do - let incoming ← Body.fromBytes content - let builder := withContentLength builder content.size - return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) - -/-- -Builds a response from raw bytes. --/ -def fromBytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Outgoing) := - fromBytesCore builder content - -/-- -Builds a response with a binary body. --/ -def bytes (builder : Response.Builder) (content : ByteArray) : Async (Response Body.Outgoing) := do - let builder := Response.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! 
"application/octet-stream") - fromBytesCore builder content - -/-- -Builds a response with a text body. --/ -def text (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do - let builder := Response.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "text/plain; charset=utf-8") - fromBytesCore builder content.toUTF8 - -/-- -Builds a response with a JSON body. --/ -def json (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do - let builder := Response.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "application/json") - fromBytesCore builder content.toUTF8 - -/-- -Builds a response with an HTML body. --/ -def html (builder : Response.Builder) (content : String) : Async (Response Body.Outgoing) := do - let builder := Response.Builder.header - builder - Header.Name.contentType - (Header.Value.ofString! "text/html; charset=utf-8") - fromBytesCore builder content.toUTF8 - -/-- -Builds a response with no body. --/ -def noBody (builder : Response.Builder) : Async (Response Body.Outgoing) := - Response.Builder.blank builder + return builder.body (Body.Internal.incomingToOutgoing incoming) end Std.Http.Response.Builder diff --git a/src/Std/Internal/Http/Data/Body/Writer.lean b/src/Std/Internal/Http/Data/Body/Writer.lean new file mode 100644 index 000000000000..3fdff1c87296 --- /dev/null +++ b/src/Std/Internal/Http/Data/Body/Writer.lean @@ -0,0 +1,179 @@ +/- +Copyright (c) 2025 Lean FRO, LLC. All rights reserved. +Released under Apache 2.0 license as described in the file LICENSE. +Authors: Sofia Rodrigues +-/ +module + +prelude +public import Std.Internal.Async +public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Chunk +public import Std.Internal.Http.Data.Body.Stream +public import Std.Internal.Http.Data.Body.Full +public import Std.Internal.Http.Data.Body.Empty + +public section + +/-! 
+# Body.Writer + +Writer typeclass for body-like values that can produce chunk streams. +-/ + +namespace Std.Http.Body +open Std Internal IO Async + +set_option linter.all true + +/-- +Typeclass for values that can be written as HTTP body streams. +-/ +class Writer (α : Type) where + /-- + Sends a body chunk. + -/ + send : α → Chunk → Bool → Async Unit + + /-- + Closes the writer stream. + -/ + close : α → Async Unit + + /-- + Returns `true` when the writer stream is closed. + -/ + isClosed : α → Async Bool + + /-- + Returns `true` when a consumer is waiting for data. + -/ + hasInterest : α → Async Bool + + /-- + Gets known stream size metadata, if available. + -/ + getKnownSize : α → Async (Option Body.Length) + + /-- + Sets known stream size metadata. + -/ + setKnownSize : α → Option Body.Length → Async Unit + + /-- + Selector that resolves when consumer interest appears. + -/ + interestSelector : α → Selector Bool + +namespace Writer + +/-- +Sends a chunk with `incomplete := false`. +-/ +@[inline] +def writeChunk [Writer α] (body : α) (chunk : Chunk) : Async Unit := + Writer.send body chunk false + +end Writer + +/-- +Union of writer-capable body variants. +-/ +inductive AnyBody where + /-- + Channel-backed streaming body writer. + -/ + | outgoing (body : Outgoing) + /-- + Fixed full-body writer handle. + -/ + | full (body : Full) + /-- + Always-empty writer handle. 
+ -/ + | empty (body : Empty) + +instance : Coe Outgoing AnyBody where + coe := .outgoing + +instance : Coe Full AnyBody where + coe := .full + +instance : Coe Empty AnyBody where + coe := .empty + +instance : Coe (Response Empty) (Response AnyBody) where + coe f := { f with } + +instance : Coe (Response Full) (Response AnyBody) where + coe f := { f with } + +instance : Coe (Response Outgoing) (Response AnyBody) where + coe f := { f with } + +instance : Writer Outgoing where + send body chunk incomplete := Outgoing.send body chunk incomplete + close := Outgoing.close + isClosed := Outgoing.isClosed + hasInterest := Outgoing.hasInterest + getKnownSize := Outgoing.getKnownSize + setKnownSize := Outgoing.setKnownSize + interestSelector := Outgoing.interestSelector + +instance : Writer Full where + send body chunk incomplete := Full.send body chunk incomplete + close := Full.close + isClosed := Full.isClosed + hasInterest := Full.hasInterest + getKnownSize := Full.getKnownSize + setKnownSize := Full.setKnownSize + interestSelector := Full.interestSelector + +instance : Writer Empty where + send _ _ _ := throw <| .userError "cannot send" + close _ := pure () + isClosed _ := pure false + hasInterest _ := pure false + getKnownSize _ := pure (some (.fixed 0)) + setKnownSize _ _ := pure () + interestSelector _ := { + tryFn := pure (some false) + registerFn waiter := do + let lose := pure () + let win promise := do + promise.resolve (.ok false) + waiter.race lose win + unregisterFn := pure () + } + +instance : Writer AnyBody where + send + | .outgoing body, chunk, incomplete => Writer.send body chunk incomplete + | .full body, chunk, incomplete => Writer.send body chunk incomplete + | .empty body, chunk, incomplete => Writer.send body chunk incomplete + close + | .outgoing body => Writer.close body + | .full body => Writer.close body + | .empty body => Writer.close body + isClosed + | .outgoing body => Writer.isClosed body + | .full body => Writer.isClosed body + | .empty 
body => Writer.isClosed body
+  hasInterest
+    | .outgoing body => Writer.hasInterest body
+    | .full body => Writer.hasInterest body
+    | .empty body => Writer.hasInterest body
+  getKnownSize
+    | .outgoing body => Writer.getKnownSize body
+    | .full body => Writer.getKnownSize body
+    | .empty body => Writer.getKnownSize body
+  setKnownSize
+    | .outgoing body, size => Writer.setKnownSize body size
+    | .full body, size => Writer.setKnownSize body size
+    | .empty body, size => Writer.setKnownSize body size
+  interestSelector
+    | .outgoing body => Writer.interestSelector body
+    | .full body => Writer.interestSelector body
+    | .empty body => Writer.interestSelector body
+
+end Std.Http.Body

From c5db47444e50478b1162737c03354d507a75e551 Mon Sep 17 00:00:00 2001
From: Sofia Rodrigues
Date: Fri, 20 Feb 2026 11:46:08 -0300
Subject: [PATCH 37/44] fix: body tests

---
 tests/lean/run/async_http_body.lean | 123 ++++++++++++++++++++++++----
 1 file changed, 105 insertions(+), 18 deletions(-)

diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean
index 5d7fef284ecc..4f0d1895e109 100644
--- a/tests/lean/run/async_http_body.lean
+++ b/tests/lean/run/async_http_body.lean
@@ -257,13 +257,100 @@ def channelInterestSelectorClose : Async Unit := do
 
 #eval channelInterestSelectorClose.block
 
+/-! ## Full tests -/
+
+-- Test Full.recv returns content once then EOF
+
+def fullRecvConsumesOnce : Async Unit := do
+  let full ← Body.Full.ofUTF8String "hello"
+  let first ← full.recv none
+  let second ← full.recv none
+
+  assert! first.isSome
+  assert! first.get!.data == "hello".toUTF8
+  assert! second.isNone
+
+#eval fullRecvConsumesOnce.block
+
+-- Test Full known-size metadata tracks consumption
+
+def fullKnownSizeLifecycle : Async Unit := do
+  let data := ByteArray.mk #[0x01, 0x02, 0x03, 0x04]
+  let full ← Body.Full.ofByteArray data
+
+  assert! (← full.getKnownSize) == some (.fixed 4)
+  let chunk ← full.tryRecv
+  assert! chunk.isSome
+  assert! 
chunk.get!.data == data + assert! (← full.getKnownSize) == some (.fixed 0) + +#eval fullKnownSizeLifecycle.block + +-- Test Full.close discards remaining content + +def fullClose : Async Unit := do + let full ← Body.Full.ofUTF8String "bye" + assert! !(← full.isClosed) + full.close + assert! (← full.isClosed) + assert! (← full.tryRecv).isNone + +#eval fullClose.block + +-- Test Full interest API always reports no consumer interest + +def fullInterest : Async Unit := do + let full ← Body.Full.ofUTF8String "x" + assert! !(← full.hasInterest) + let interested ← Selectable.one #[ + .case full.interestSelector pure + ] + assert! interested == false + +#eval fullInterest.block + +/-! ## Empty tests -/ + +-- Test Empty writer metadata and interest behavior + +def emptyWriterBasics : Async Unit := do + let body : Body.Empty := {} + assert! (← Writer.getKnownSize body) == some (.fixed 0) + assert! !(← Writer.isClosed body) + assert! !(← Writer.hasInterest body) + + Writer.setKnownSize body (some (.fixed 99)) + assert! (← Writer.getKnownSize body) == some (.fixed 0) + + Writer.close body + let interested ← Selectable.one #[ + .case (Writer.interestSelector body) pure + ] + assert! interested == false + +#eval emptyWriterBasics.block + +-- Test Empty writer rejects send + +def emptyWriterSendFails : Async Unit := do + let body : Body.Empty := {} + let failed ← + try + Writer.send body (Chunk.ofByteArray "x".toUTF8) false + pure false + catch _ => + pure true + assert! failed + +#eval emptyWriterSendFails.block + /-! 
## Request.Builder body tests -/ -private def recvBuiltBody (body : Body.Outgoing) : Async (Option Chunk) := - (Body.Internal.outgoingToIncoming body).recv none +private def recvBuiltBody (body : Body.Full) : Async (Option Chunk) := + body.recv none -private def tryRecvBuiltBody (body : Body.Outgoing) : Async (Option Chunk) := - (Body.Internal.outgoingToIncoming body).tryRecv +private def emptyBodyKnownSize (body : Body.Empty) : Async (Option Body.Length) := + Writer.getKnownSize body -- Test Request.Builder.text sets correct headers @@ -272,7 +359,7 @@ def requestBuilderText : Async Unit := do |>.text "Hello, World!" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") - assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") + assert! req.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody req.body assert! body.isSome @@ -287,34 +374,34 @@ def requestBuilderJson : Async Unit := do |>.json "{\"key\": \"value\"}" assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") + assert! req.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody req.body assert! body.isSome assert! body.get!.data == "{\"key\": \"value\"}".toUTF8 #eval requestBuilderJson.block --- Test Request.Builder.fromBytes sets content-length and body +-- Test Request.Builder.fromBytes sets body def requestBuilderFromBytes : Async Unit := do let data := ByteArray.mk #[0x01, 0x02, 0x03] let req ← Request.post (.originForm! "/api") |>.fromBytes data - assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3") + assert! req.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody req.body assert! body.isSome assert! 
body.get!.data == data #eval requestBuilderFromBytes.block --- Test Request.Builder.noBody creates empty body +-- Test Request.Builder.blank creates empty body def requestBuilderNoBody : Async Unit := do let req ← Request.get (.originForm! "/api") - |>.noBody + |>.blank - let body ← tryRecvBuiltBody req.body - assert! body.isNone + assert! (← emptyBodyKnownSize req.body) == some (.fixed 0) #eval requestBuilderNoBody.block @@ -327,7 +414,7 @@ def responseBuilderText : Async Unit := do |>.text "Hello, World!" assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8") - assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13") + assert! res.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody res.body assert! body.isSome @@ -342,33 +429,33 @@ def responseBuilderJson : Async Unit := do |>.json "{\"status\": \"ok\"}" assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json") + assert! res.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody res.body assert! body.isSome assert! body.get!.data == "{\"status\": \"ok\"}".toUTF8 #eval responseBuilderJson.block --- Test Response.Builder.fromBytes sets content-length and body +-- Test Response.Builder.fromBytes sets body def responseBuilderFromBytes : Async Unit := do let data := ByteArray.mk #[0xaa, 0xbb] let res ← Response.ok |>.fromBytes data - assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "2") + assert! res.head.headers.get? Header.Name.contentLength == none let body ← recvBuiltBody res.body assert! body.isSome assert! 
body.get!.data == data
 
 #eval responseBuilderFromBytes.block
 
--- Test Response.Builder.noBody creates empty body
+-- Test Response.Builder.blank creates empty body
 
 def responseBuilderNoBody : Async Unit := do
   let res ← Response.ok
-    |>.noBody
+    |>.blank
 
-  let body ← tryRecvBuiltBody res.body
-  assert! body.isNone
+  assert! (← emptyBodyKnownSize res.body) == some (.fixed 0)
 
 #eval responseBuilderNoBody.block
 
From 541f9b2dc94f9e66f15841deb5414fa3f44582c5 Mon Sep 17 00:00:00 2001
From: Sofia Rodrigues
Date: Fri, 20 Feb 2026 12:01:02 -0300
Subject: [PATCH 38/44] fix: rendezvous stream

---
 src/Std/Internal/Http/Data/Body/Stream.lean | 242 +++++++-------------
 tests/lean/run/async_http_body.lean         |  43 +---
 2 files changed, 92 insertions(+), 193 deletions(-)

diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean
index 9c093b81c926..739967306ca8 100644
--- a/src/Std/Internal/Http/Data/Body/Stream.lean
+++ b/src/Std/Internal/Http/Data/Body/Stream.lean
@@ -8,7 +8,6 @@ module
 prelude
 public import Std.Sync
 public import Std.Internal.Async
-public import Std.Internal.Http.Data.Extensions
 public import Std.Internal.Http.Data.Request
 public import Std.Internal.Http.Data.Response
 public import Std.Internal.Http.Data.Chunk
@@ -21,18 +20,13 @@ public section
 /-!
 # Body Channels
 
-This module defines a body channel split into two faces:
+This module defines a zero-buffer rendezvous body channel split into two faces:
 
 - `Body.Outgoing`: producer side (send chunks)
 - `Body.Incoming`: consumer side (receive chunks)
 
-Response/request builders produce `Body.Outgoing` because they only write body data.
-Consumers and handlers receive `Body.Incoming` because they only read body data.
-
-The channel supports an internal FIFO queue for pre-buffered chunks.
-Sends enqueue chunks while buffer capacity is available; when full, one producer may block until
-space is freed. 
At most one blocked producer, one blocked consumer, and one blocked -interest-selector are supported. +There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender. +At most one blocked producer and one blocked consumer are supported. -/ namespace Std.Http.Body @@ -61,7 +55,6 @@ private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := private structure Producer where chunk : Chunk - promise : IO.Promise Bool open Internal.IO.Async in private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do @@ -73,42 +66,27 @@ private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Boo private structure State where /-- - Single blocked producer slot used when the internal queue is full. - -/ - pendingProducer : Option Producer := none - - /-- - FIFO chunks waiting to be consumed. + A single blocked producer waiting for a receiver. -/ - queuedChunks : Std.Queue Chunk := ∅ + pendingProducer : Option Producer /-- - Number of chunks currently buffered in `queuedChunks`. - -/ - queuedSize : Nat := 0 - - /-- - Maximum number of buffered chunks allowed before producers block. - -/ - capacity : Nat - - /-- - A single blocked consumer waiting for a producer + A single blocked consumer waiting for a producer. -/ pendingConsumer : Option Consumer /-- - A waiter for `Outgoing.interestSelector` + A waiter for `Outgoing.interestSelector`. -/ interestWaiter : Option (Internal.IO.Async.Waiter Bool) /-- - Whether the channel is closed + Whether the channel is closed. -/ closed : Bool /-- - Known size of the stream if available + Known size of the stream if available. -/ knownSize : Option Body.Length @@ -133,30 +111,10 @@ structure Outgoing where private state : Mutex Channel.State deriving Nonempty, TypeName -/- Internal conversions between channel faces. -Use these only in HTTP internals where body direction must be adapted. 
-/ -namespace Internal - -/-- Reinterprets the receive-side handle as a send-side handle over the same channel. -/ -@[always_inline, inline] -def incomingToOutgoing (incoming : Incoming) : Outgoing := - { state := incoming.state } - -/-- Reinterprets the send-side handle as a receive-side handle over the same channel. -/ -@[always_inline, inline] -def outgoingToIncoming (outgoing : Outgoing) : Incoming := - { state := outgoing.state } - -end Internal - -/-- Creates a queue-backed body channel. -/ -def mkChannel (capacity : Nat := 64) : Async (Outgoing × Incoming) := do - let capacity := capacity.max 1 +/-- Creates a rendezvous body channel. -/ +def mkChannel : Async (Outgoing × Incoming) := do let state ← Mutex.new { pendingProducer := none - queuedChunks := ∅ - queuedSize := 0 - capacity := capacity pendingConsumer := none interestWaiter := none closed := false @@ -171,12 +129,6 @@ private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size)) | _ => knownSize -private def mergeChunks (base : Chunk) (next : Chunk) : Chunk := - { - data := base.data ++ next.data - extensions := if base.extensions.isEmpty then next.extensions else base.extensions - } - private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Unit := do let st ← get @@ -213,7 +165,7 @@ private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLift private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Bool := do let st ← get - return st.queuedSize > 0 || st.pendingProducer.isSome || st.closed + return st.pendingProducer.isSome || st.closed private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : AtomicT State m Bool := do @@ -223,27 +175,7 @@ private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] : private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] : AtomicT State m 
(Option Chunk) := do let st ← get - if let some (chunk, queuedChunks) := st.queuedChunks.dequeue? then - let mut next := { - st with - queuedChunks - queuedSize := st.queuedSize - 1 - knownSize := decreaseKnownSize st.knownSize chunk - } - - if let some producer := st.pendingProducer then - producer.promise.resolve true - next := { - next with - pendingProducer := none - queuedChunks := next.queuedChunks.enqueue producer.chunk - queuedSize := next.queuedSize + 1 - } - - set next - return some chunk - else if let some producer := st.pendingProducer then - producer.promise.resolve true + if let some producer := st.pendingProducer then set { st with pendingProducer := none @@ -262,9 +194,6 @@ private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO if let some consumer := st.pendingConsumer then discard <| consumer.resolve none - if let some producer := st.pendingProducer then - producer.promise.resolve false - if let some waiter := st.interestWaiter then discard <| resolveInterestWaiter waiter false @@ -283,7 +212,7 @@ namespace Incoming /-- Attempts to receive a chunk from the channel without blocking. -Returns `some chunk` only when data is already queued. +Returns `some chunk` only when a producer is already waiting. -/ def tryRecv (incoming : Incoming) : Async (Option Chunk) := incoming.state.atomically do @@ -312,46 +241,39 @@ private def recv' (incoming : Incoming) : BaseIO (AsyncTask (Option Chunk)) := d | some res => .ok res /-- -Receives a chunk from the channel. Blocks until data is available or the channel closes. -Returns `none` if the channel is closed and no queued data remains. +Receives a chunk from the channel. Blocks until a producer sends one. +Returns `none` if the channel is closed and no producer is waiting. -/ def recv (incoming : Incoming) (_count : Option UInt64) : Async (Option Chunk) := do Async.ofAsyncTask (← recv' incoming) -/-- -Closes the channel. --/ +/-- Closes the channel. 
-/ def close (incoming : Incoming) : Async Unit := incoming.state.atomically do Channel.close' -/-- -Checks whether the channel is closed. --/ +/-- Checks whether the channel is closed. -/ @[always_inline, inline] def isClosed (incoming : Incoming) : Async Bool := incoming.state.atomically do return (← get).closed -/-- -Gets the known size if available. --/ +/-- Gets the known size if available. -/ @[always_inline, inline] def getKnownSize (incoming : Incoming) : Async (Option Body.Length) := incoming.state.atomically do return (← get).knownSize -/-- -Sets known size metadata. --/ +/-- Sets known size metadata. -/ @[always_inline, inline] def setKnownSize (incoming : Incoming) (size : Option Body.Length) : Async Unit := incoming.state.atomically do modify fun st => { st with knownSize := size } open Internal.IO.Async in + /-- -Creates a selector that resolves when queued data is available (or the channel closes). +Creates a selector that resolves when a producer is waiting (or the channel closes). 
-/ def recvSelector (incoming : Incoming) : Selector (Option Chunk) where tryFn := do @@ -468,7 +390,11 @@ private def collapseForSend return .error (.userError "channel closed") let merged := match st.pendingIncompleteChunk with - | some pending => Channel.mergeChunks pending chunk + | some pending => + { + data := pending.data ++ chunk.data + extensions := if pending.extensions.isEmpty then chunk.extensions else pending.extensions + } | none => chunk if incomplete then @@ -478,14 +404,21 @@ private def collapseForSend set { st with pendingIncompleteChunk := none } return .ok (some merged) -private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit) := do - outgoing.state.atomically do - Channel.pruneFinishedWaiters - while true do +private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do + let mut installedPendingProducer := false + while true do + let result ← outgoing.state.atomically do + Channel.pruneFinishedWaiters let st ← get if st.closed then - return Task.pure (.error (IO.Error.userError "channel closed")) + return Except.error (IO.Error.userError "channel closed") + + if installedPendingProducer then + if st.pendingProducer.isNone then + return Except.ok true + else + return Except.ok false if let some consumer := st.pendingConsumer then let success ← consumer.resolve (some chunk) @@ -495,27 +428,26 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : BaseIO (AsyncTask Unit pendingConsumer := none knownSize := Channel.decreaseKnownSize st.knownSize chunk } - return AsyncTask.pure () + return Except.ok true else set { st with pendingConsumer := none } - else if st.queuedSize < st.capacity then - set { - st with - queuedChunks := st.queuedChunks.enqueue chunk - queuedSize := st.queuedSize + 1 - } - return AsyncTask.pure () + return Except.ok false + else if st.pendingProducer.isSome then + return Except.error (IO.Error.userError "only one blocked producer is allowed") else - if st.pendingProducer.isSome then 
- return Task.pure (.error (IO.Error.userError "only one blocked producer is allowed")) + set { st with pendingProducer := some { chunk := chunk } } + return Except.ok false - let promise ← IO.Promise.new - set { st with pendingProducer := some { chunk, promise } } - return promise.result?.map (sync := true) fun - | none => .error (IO.Error.userError "the promise linked to the producer was dropped") - | some true => .ok () - | some false => .error (IO.Error.userError "channel closed") - return Task.pure (.error (IO.Error.userError "unreachable")) + match result with + | .error err => + throw err + | .ok true => + return () + | .ok false => + installedPendingProducer := true + let _ ← Selectable.one #[ + .case (← Selector.sleep 1) pure + ] /-- Sends a chunk. @@ -535,55 +467,43 @@ def send (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool := false) : As | .ok none => pure () | .ok (some toSend) => - let res ← send' outgoing toSend - await res + send' outgoing toSend -/-- -Alias for `send`. --/ +/-- Alias for `send`. -/ def writeChunk (outgoing : Outgoing) (chunk : Chunk) : Async Unit := outgoing.send chunk -/-- -Closes the channel. --/ +/-- Closes the channel. -/ def close (outgoing : Outgoing) : Async Unit := outgoing.state.atomically do Channel.close' -/-- -Checks whether the channel is closed. --/ +/-- Checks whether the channel is closed. -/ @[always_inline, inline] def isClosed (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do return (← get).closed -/-- -Returns true when a consumer is currently blocked waiting for data. --/ +/-- Returns true when a consumer is currently blocked waiting for data. -/ def hasInterest (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do Channel.pruneFinishedWaiters Channel.hasInterest' -/-- -Gets the known size if available. --/ +/-- Gets the known size if available. 
-/ @[always_inline, inline] def getKnownSize (outgoing : Outgoing) : Async (Option Body.Length) := outgoing.state.atomically do return (← get).knownSize -/-- -Sets known size metadata. --/ +/-- Sets known size metadata. -/ @[always_inline, inline] def setKnownSize (outgoing : Outgoing) (size : Option Body.Length) : Async Unit := outgoing.state.atomically do modify fun st => { st with knownSize := size } open Internal.IO.Async in + /-- Creates a selector that resolves when consumer interest is present. Returns `true` when a consumer is waiting, `false` when the channel closes first. @@ -626,6 +546,22 @@ def interestSelector (outgoing : Outgoing) : Selector Bool where end Outgoing +/- Internal conversions between channel faces. +Use these only in HTTP internals where body direction must be adapted. -/ +namespace Internal + +/-- Reinterprets the receive-side handle as a send-side handle over the same channel. -/ +@[always_inline, inline] +def incomingToOutgoing (incoming : Incoming) : Outgoing := + { state := incoming.state } + +/-- Reinterprets the send-side handle as a receive-side handle over the same channel. -/ +@[always_inline, inline] +def outgoingToIncoming (outgoing : Outgoing) : Incoming := + { state := outgoing.state } + +end Internal + /-- Creates a body from a producer function. Returns the receive-side handle immediately and runs `gen` in a detached task. @@ -645,18 +581,10 @@ def stream (gen : Outgoing → Async Unit) : Async Incoming := do Creates a body from a fixed byte array. 
-/ def fromBytes (content : ByteArray) : Async Incoming := do - let (outgoing, incoming) ← mkChannel - outgoing.setKnownSize (some (.fixed content.size)) - if content.size > 0 then - outgoing.state.atomically do - let st ← get - set { - st with - queuedChunks := st.queuedChunks.enqueue (Chunk.ofByteArray content) - queuedSize := st.queuedSize + 1 - } - outgoing.close - return incoming + stream fun outgoing => do + outgoing.setKnownSize (some (.fixed content.size)) + if content.size > 0 then + outgoing.send (Chunk.ofByteArray content) /-- Creates an empty body. @@ -686,7 +614,7 @@ def stream (gen : Body.Outgoing → Async Unit) : Async (Request Body.Outgoing) := do let incoming ← Body.stream gen - return builder.body (Body.Internal.incomingToOutgoing incoming) + return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming) end Std.Http.Request.Builder @@ -701,6 +629,6 @@ def stream (gen : Body.Outgoing → Async Unit) : Async (Response Body.Outgoing) := do let incoming ← Body.stream gen - return builder.body (Body.Internal.incomingToOutgoing incoming) + return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming) end Std.Http.Response.Builder diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 4f0d1895e109..544866203d1d 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -21,23 +21,6 @@ def channelSendRecv : Async Unit := do #eval channelSendRecv.block --- Test sends are buffered when no consumer is waiting - -def channelBufferedSends : Async Unit := do - let (outgoing, incoming) ← Body.mkChannel - - outgoing.send (Chunk.ofByteArray "one".toUTF8) - outgoing.send (Chunk.ofByteArray "two".toUTF8) - - let first ← incoming.recv none - let second ← incoming.recv none - - assert! first.isSome - assert! second.isSome - assert! first.get!.data == "one".toUTF8 - assert! 
second.get!.data == "two".toUTF8 - -#eval channelBufferedSends.block -- Test tryRecv on empty channel returns none @@ -153,39 +136,27 @@ def channelKnownSizeDecreases : Async Unit := do -- Test only one blocked producer is allowed def channelSingleProducerRule : Async Unit := do - let (outgoing, incoming) ← Body.mkChannel (capacity := 1) - outgoing.send (Chunk.ofByteArray "one".toUTF8) - - let send2 ← async (t := AsyncTask) <| do - try - outgoing.send (Chunk.ofByteArray "two".toUTF8) - return true - catch _ => - return false + let (outgoing, incoming) ← Body.mkChannel + let send1 ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "one".toUTF8) - -- Yield so `send2` can occupy the single blocked-producer slot. + -- Yield so `send1` can occupy the single pending-producer slot. let _ ← Selectable.one #[ .case (← Selector.sleep 5) pure ] - let send3Failed ← + let send2Failed ← try - outgoing.send (Chunk.ofByteArray "three".toUTF8) + outgoing.send (Chunk.ofByteArray "two".toUTF8) pure false catch _ => pure true - assert! send3Failed + assert! send2Failed let first ← incoming.recv none assert! first.isSome assert! first.get!.data == "one".toUTF8 - let ok2 ← await send2 - assert! ok2 - - let second ← incoming.recv none - assert! second.isSome - assert! 
second.get!.data == "two".toUTF8 + await send1 #eval channelSingleProducerRule.block From 93a6ecbbbc18e0858dae1fb9af709c18f8b0e1e5 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 20 Feb 2026 12:33:23 -0300 Subject: [PATCH 39/44] feat: reader features to write only things --- src/Std/Internal/Http/Data/Body/Empty.lean | 38 ++++++++++++++++++ src/Std/Internal/Http/Data/Body/Full.lean | 44 ++++++++++++++++++--- src/Std/Internal/Http/Data/Body/Reader.lean | 6 +++ 3 files changed, 82 insertions(+), 6 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Empty.lean b/src/Std/Internal/Http/Data/Body/Empty.lean index 586229d9e6d2..0852fb5acc26 100644 --- a/src/Std/Internal/Http/Data/Body/Empty.lean +++ b/src/Std/Internal/Http/Data/Body/Empty.lean @@ -10,6 +10,7 @@ public import Std.Internal.Async public import Std.Internal.Http.Data.Request public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Body.Reader public import Std.Internal.Http.Data.Chunk public section @@ -31,6 +32,43 @@ An empty body handle. structure Empty where deriving Inhabited +namespace Empty + +/-- Receives from an empty body, always returning end-of-stream. -/ +@[inline] +def recv (_ : Empty) (_count : Option UInt64) : Async (Option Chunk) := + pure none + +/-- Closes an empty body (no-op). -/ +@[inline] +def close (_ : Empty) : Async Unit := + pure () + +/-- Empty bodies are always closed for reading. -/ +@[inline] +def isClosed (_ : Empty) : Async Bool := + pure true + +open Internal.IO.Async in +/-- Selector that immediately resolves with end-of-stream for an empty body. 
-/ +@[inline] +def recvSelector (_ : Empty) : Selector (Option Chunk) where + tryFn := pure (some none) + registerFn waiter := do + let lose := pure () + let win promise := do + promise.resolve (.ok none) + waiter.race lose win + unregisterFn := pure () + +end Empty + +instance : Reader Empty where + recv := Empty.recv + close := Empty.close + isClosed := Empty.isClosed + recvSelector := Empty.recvSelector + end Std.Http.Body namespace Std.Http.Request.Builder diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean index 589549cb07cf..af9e5a362a94 100644 --- a/src/Std/Internal/Http/Data/Body/Full.lean +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -11,6 +11,7 @@ public import Std.Internal.Async public import Std.Internal.Http.Data.Request public import Std.Internal.Http.Data.Response public import Std.Internal.Http.Data.Body.Length +public import Std.Internal.Http.Data.Body.Reader public import Std.Internal.Http.Data.Chunk public import Init.Data.ByteArray @@ -48,6 +49,18 @@ deriving Nonempty namespace Full +private def takeChunk [Monad m] [MonadLiftT (ST IO.RealWorld) m] : + AtomicT (Option ByteArray) m (Option Chunk) := do + match ← get with + | none => + pure none + | some data => + set (none : Option ByteArray) + if data.isEmpty then + pure none + else + pure (some (Chunk.ofByteArray data)) + /-- Creates a `Full` body from a `ByteArray`. -/ @@ -68,12 +81,7 @@ Returns `none` if the data has already been consumed or the body is closed. -/ def tryRecv (full : Full) : Async (Option Chunk) := full.state.atomically do - match ← get with - | none => return none - | some data => - set (none : Option ByteArray) - if data.isEmpty then return none - return some (Chunk.ofByteArray data) + takeChunk /-- Receives the body data. 
Returns the full byte array on the first call as a single chunk, @@ -141,8 +149,32 @@ def interestSelector (_ : Full) : Selector Bool where waiter.race lose win unregisterFn := pure () +open Internal.IO.Async in +/-- +Selector that immediately resolves to the remaining chunk (or EOF). +-/ +def recvSelector (full : Full) : Selector (Option Chunk) where + tryFn := do + let chunk ← full.state.atomically do + takeChunk + pure (some chunk) + registerFn waiter := do + let chunk ← full.state.atomically do + takeChunk + let lose := pure () + let win promise := do + promise.resolve (.ok chunk) + waiter.race lose win + unregisterFn := pure () + end Full +instance : Reader Full where + recv := Full.recv + close := Full.close + isClosed := Full.isClosed + recvSelector := Full.recvSelector + end Std.Http.Body namespace Std.Http.Request.Builder diff --git a/src/Std/Internal/Http/Data/Body/Reader.lean b/src/Std/Internal/Http/Data/Body/Reader.lean index b38bb5bcc6cc..b27e0cde7dc5 100644 --- a/src/Std/Internal/Http/Data/Body/Reader.lean +++ b/src/Std/Internal/Http/Data/Body/Reader.lean @@ -54,4 +54,10 @@ instance : Reader Incoming where isClosed := Incoming.isClosed recvSelector := Incoming.recvSelector +instance : Reader Outgoing where + recv body count := Reader.recv (Body.Internal.outgoingToIncoming body) count + close body := Reader.close (Body.Internal.outgoingToIncoming body) + isClosed body := Reader.isClosed (Body.Internal.outgoingToIncoming body) + recvSelector body := Reader.recvSelector (Body.Internal.outgoingToIncoming body) + end Std.Http.Body From 549e16f0697f410696182056f9cede08a1681cf4 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 20 Feb 2026 13:04:51 -0300 Subject: [PATCH 40/44] feat: add reader --- src/Std/Internal/Http/Data/Body/Writer.lean | 48 +++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/src/Std/Internal/Http/Data/Body/Writer.lean b/src/Std/Internal/Http/Data/Body/Writer.lean index 3fdff1c87296..6ac86272dc49 100644 --- 
a/src/Std/Internal/Http/Data/Body/Writer.lean +++ b/src/Std/Internal/Http/Data/Body/Writer.lean @@ -111,6 +111,36 @@ instance : Coe (Response Full) (Response AnyBody) where instance : Coe (Response Outgoing) (Response AnyBody) where coe f := { f with } +instance : Coe (ContextAsync (Response Empty)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + +instance : Coe (ContextAsync (Response Full)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + +instance : Coe (ContextAsync (Response Outgoing)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + +instance : Coe (Async (Response Empty)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + +instance : Coe (Async (Response Full)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + +instance : Coe (Async (Response Outgoing)) (ContextAsync (Response AnyBody)) where + coe action := do + let response ← action + pure (response : Response AnyBody) + instance : Writer Outgoing where send body chunk incomplete := Outgoing.send body chunk incomplete close := Outgoing.close @@ -176,4 +206,22 @@ instance : Writer AnyBody where | .full body => Writer.interestSelector body | .empty body => Writer.interestSelector body +instance : Reader AnyBody where + recv + | .outgoing body, count => Reader.recv body count + | .full body, count => Reader.recv body count + | .empty body, count => Reader.recv body count + close + | .outgoing body => Reader.close body + | .full body => Reader.close body + | .empty body => Reader.close body + isClosed + | .outgoing body => Reader.isClosed body + | .full body => Reader.isClosed body + | .empty body => Reader.isClosed body + recvSelector + | .outgoing body 
=> Reader.recvSelector body + | .full body => Reader.recvSelector body + | .empty body => Reader.recvSelector body + end Std.Http.Body From 74dc55152f88a672fdee619eef2759b3fc818fe3 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 24 Feb 2026 14:31:29 -0300 Subject: [PATCH 41/44] fix: test --- tests/lean/run/async_http_body.lean | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 544866203d1d..39abadf09ab7 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -96,14 +96,14 @@ def channelForIn : Async Unit := do def channelExtensions : Async Unit := do let (outgoing, incoming) ← Body.mkChannel - let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some "value")] : Chunk } + let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some (Chunk.ExtensionValue.ofString! "value"))] : Chunk } let sendTask ← async (t := AsyncTask) <| outgoing.send chunk let result ← incoming.recv none assert! result.isSome assert! result.get!.extensions.size == 1 - assert! result.get!.extensions[0]! == (.mk "key", some "value") + assert! result.get!.extensions[0]! == (Chunk.ExtensionName.mk "key", some <| .ofString! 
"value") await sendTask #eval channelExtensions.block From c26664945494c4eacd420363a8b82c078d2a3391 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Tue, 24 Feb 2026 20:16:05 -0300 Subject: [PATCH 42/44] fix: sleep --- src/Std/Internal/Http/Data/Body/Stream.lean | 47 ++++++++++++--------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index 739967306ca8..ec1a4d47d285 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -55,6 +55,8 @@ private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := private structure Producer where chunk : Chunk + /-- Resolved with `true` when consumed by a receiver, `false` when the channel closes. -/ + done : IO.Promise Bool open Internal.IO.Async in private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do @@ -181,6 +183,7 @@ private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT Base pendingProducer := none knownSize := decreaseKnownSize st.knownSize producer.chunk } + discard <| producer.done.resolve true return some producer.chunk else return none @@ -197,6 +200,9 @@ private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO if let some waiter := st.interestWaiter then discard <| resolveInterestWaiter waiter false + if let some producer := st.pendingProducer then + discard <| producer.done.resolve false + set { st with pendingProducer := none @@ -404,21 +410,17 @@ private def collapseForSend set { st with pendingIncompleteChunk := none } return .ok (some merged) +-- Returns `some true` = delivered directly, `some false` = consumer race lost (retry), +-- `none` = producer installed, caller must await `done`. 
private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do - let mut installedPendingProducer := false + let done ← IO.Promise.new while true do - let result ← outgoing.state.atomically do + let result : Except IO.Error (Option Bool) ← outgoing.state.atomically do Channel.pruneFinishedWaiters let st ← get if st.closed then - return Except.error (IO.Error.userError "channel closed") - - if installedPendingProducer then - if st.pendingProducer.isNone then - return Except.ok true - else - return Except.ok false + return .error (IO.Error.userError "channel closed") if let some consumer := st.pendingConsumer then let success ← consumer.resolve (some chunk) @@ -428,26 +430,31 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do pendingConsumer := none knownSize := Channel.decreaseKnownSize st.knownSize chunk } - return Except.ok true + return .ok (some true) else + -- Consumer's selector race was lost; clear the stale entry and retry. set { st with pendingConsumer := none } - return Except.ok false + return .ok (some false) else if st.pendingProducer.isSome then - return Except.error (IO.Error.userError "only one blocked producer is allowed") + return .error (IO.Error.userError "only one blocked producer is allowed") else - set { st with pendingProducer := some { chunk := chunk } } - return Except.ok false + set { st with pendingProducer := some { chunk, done } } + return .ok none match result with | .error err => throw err - | .ok true => + | .ok (some true) => return () - | .ok false => - installedPendingProducer := true - let _ ← Selectable.one #[ - .case (← Selector.sleep 1) pure - ] + | .ok (some false) => + -- Retry immediately; no sleep needed, no producer installed yet. + pure () + | .ok none => + -- Producer is installed; block until the consumer signals via `done`. + -- `done` resolves with `true` when consumed, `false` when the channel closes. + match ← await done.result? 
with + | some true => return () + | _ => throw (IO.Error.userError "channel closed") /-- Sends a chunk. From beedfa1e4e359e4376b019fb4dfc7e9c88f84079 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Thu, 26 Feb 2026 16:49:15 -0300 Subject: [PATCH 43/44] fix: small comments fix and parameters --- src/Std/Internal/Http/Data/Body/Basic.lean | 4 +- src/Std/Internal/Http/Data/Body/Empty.lean | 19 +++- src/Std/Internal/Http/Data/Body/Full.lean | 4 +- src/Std/Internal/Http/Data/Body/Length.lean | 2 +- src/Std/Internal/Http/Data/Body/Reader.lean | 4 +- src/Std/Internal/Http/Data/Body/Stream.lean | 104 ++++++++++++-------- src/Std/Internal/Http/Data/Body/Writer.lean | 6 +- tests/lean/run/async_http_body.lean | 22 ++--- 8 files changed, 96 insertions(+), 69 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Basic.lean b/src/Std/Internal/Http/Data/Body/Basic.lean index 54ef13b51690..32c0597777d3 100644 --- a/src/Std/Internal/Http/Data/Body/Basic.lean +++ b/src/Std/Internal/Http/Data/Body/Basic.lean @@ -13,7 +13,7 @@ public import Std.Internal.Http.Data.Body.Length public section /-! -# Body +# Body.Basic This module defines shared types for HTTP body handling. -/ @@ -28,7 +28,7 @@ Typeclass for types that can be converted to a `ByteArray`. class ToByteArray (α : Type) where /-- - Transforms into a `ByteArray` + Transforms into a `ByteArray`. -/ toByteArray : α → ByteArray diff --git a/src/Std/Internal/Http/Data/Body/Empty.lean b/src/Std/Internal/Http/Data/Body/Empty.lean index 0852fb5acc26..3e1fd34c784b 100644 --- a/src/Std/Internal/Http/Data/Body/Empty.lean +++ b/src/Std/Internal/Http/Data/Body/Empty.lean @@ -34,23 +34,32 @@ deriving Inhabited namespace Empty -/-- Receives from an empty body, always returning end-of-stream. -/ +/-- +Receives from an empty body, always returning end-of-stream. 
+-/ @[inline] -def recv (_ : Empty) (_count : Option UInt64) : Async (Option Chunk) := +def recv (_ : Empty) : Async (Option Chunk) := pure none -/-- Closes an empty body (no-op). -/ +/-- +Closes an empty body (no-op). +-/ @[inline] def close (_ : Empty) : Async Unit := pure () -/-- Empty bodies are always closed for reading. -/ +/-- +Empty bodies are always closed for reading. +-/ @[inline] def isClosed (_ : Empty) : Async Bool := pure true open Internal.IO.Async in -/-- Selector that immediately resolves with end-of-stream for an empty body. -/ + +/-- +Selector that immediately resolves with end-of-stream for an empty body. +-/ @[inline] def recvSelector (_ : Empty) : Selector (Option Chunk) where tryFn := pure (some none) diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean index af9e5a362a94..0c33df8c2bbe 100644 --- a/src/Std/Internal/Http/Data/Body/Full.lean +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -86,10 +86,8 @@ def tryRecv (full : Full) : Async (Option Chunk) := /-- Receives the body data. Returns the full byte array on the first call as a single chunk, then `none` on all subsequent calls. - -The `count` hint is ignored; the entire content is always returned in one chunk. -/ -def recv (full : Full) (_count : Option UInt64) : Async (Option Chunk) := +def recv (full : Full) : Async (Option Chunk) := full.tryRecv /-- diff --git a/src/Std/Internal/Http/Data/Body/Length.lean b/src/Std/Internal/Http/Data/Body/Length.lean index 9996f7f40427..593bb61811a8 100644 --- a/src/Std/Internal/Http/Data/Body/Length.lean +++ b/src/Std/Internal/Http/Data/Body/Length.lean @@ -11,7 +11,7 @@ public import Init.Data.Repr public section /-! -# Length +# Body.Length This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding of an HTTP request or response. 
diff --git a/src/Std/Internal/Http/Data/Body/Reader.lean b/src/Std/Internal/Http/Data/Body/Reader.lean index b27e0cde7dc5..5de6e46f342f 100644 --- a/src/Std/Internal/Http/Data/Body/Reader.lean +++ b/src/Std/Internal/Http/Data/Body/Reader.lean @@ -31,7 +31,7 @@ class Reader (α : Type) where /-- Receives the next body chunk. Returns `none` at end-of-stream. -/ - recv : α → Option UInt64 → Async (Option Chunk) + recv : α → Async (Option Chunk) /-- Closes the reader stream. @@ -55,7 +55,7 @@ instance : Reader Incoming where recvSelector := Incoming.recvSelector instance : Reader Outgoing where - recv body count := Reader.recv (Body.Internal.outgoingToIncoming body) count + recv body := Reader.recv (Body.Internal.outgoingToIncoming body) close body := Reader.close (Body.Internal.outgoingToIncoming body) isClosed body := Reader.isClosed (Body.Internal.outgoingToIncoming body) recvSelector body := Reader.recvSelector (Body.Internal.outgoingToIncoming body) diff --git a/src/Std/Internal/Http/Data/Body/Stream.lean b/src/Std/Internal/Http/Data/Body/Stream.lean index ec1a4d47d285..bcb020ed7602 100644 --- a/src/Std/Internal/Http/Data/Body/Stream.lean +++ b/src/Std/Internal/Http/Data/Body/Stream.lean @@ -18,7 +18,7 @@ public import Init.Data.ByteArray public section /-! -# Body Channels +# Body.Stream This module defines a zero-buffer rendezvous body channel split into two faces: @@ -55,7 +55,10 @@ private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := private structure Producer where chunk : Chunk - /-- Resolved with `true` when consumed by a receiver, `false` when the channel closes. -/ + + /-- + Resolved with `true` when consumed by a receiver, `false` when the channel closes. + -/ done : IO.Promise Bool open Internal.IO.Async in @@ -101,19 +104,25 @@ deriving Nonempty end Channel -/-- Receive-side face of a body channel. -/ +/-- +Receive-side face of a body channel. 
+-/ structure Incoming where private mk :: private state : Mutex Channel.State deriving Nonempty, TypeName -/-- Send-side face of a body channel. -/ +/-- +Send-side face of a body channel. +-/ structure Outgoing where private mk :: private state : Mutex Channel.State deriving Nonempty, TypeName -/-- Creates a rendezvous body channel. -/ +/-- +Creates a rendezvous body channel. +-/ def mkChannel : Async (Outgoing × Incoming) := do let state ← Mutex.new { pendingProducer := none @@ -250,34 +259,41 @@ private def recv' (incoming : Incoming) : BaseIO (AsyncTask (Option Chunk)) := d Receives a chunk from the channel. Blocks until a producer sends one. Returns `none` if the channel is closed and no producer is waiting. -/ -def recv (incoming : Incoming) (_count : Option UInt64) : Async (Option Chunk) := - do Async.ofAsyncTask (← recv' incoming) +def recv (incoming : Incoming) : Async (Option Chunk) := do + Async.ofAsyncTask (← recv' incoming) -/-- Closes the channel. -/ +/-- +Closes the channel. +-/ def close (incoming : Incoming) : Async Unit := incoming.state.atomically do Channel.close' -/-- Checks whether the channel is closed. -/ +/-- +Checks whether the channel is closed. +-/ @[always_inline, inline] def isClosed (incoming : Incoming) : Async Bool := incoming.state.atomically do return (← get).closed -/-- Gets the known size if available. -/ +/-- +Gets the known size if available. +-/ @[always_inline, inline] def getKnownSize (incoming : Incoming) : Async (Option Body.Length) := incoming.state.atomically do return (← get).knownSize -/-- Sets known size metadata. -/ +/-- +Sets known size metadata. +-/ @[always_inline, inline] def setKnownSize (incoming : Incoming) (size : Option Body.Length) : Async Unit := incoming.state.atomically do modify fun st => { st with knownSize := size } open Internal.IO.Async in - /-- Creates a selector that resolves when a producer is waiting (or the channel closes). 
-/ @@ -319,7 +335,7 @@ protected partial def forIn (step : Chunk → β → Async (ForInStep β)) : Async β := do let rec @[specialize] loop (incoming : Incoming) (acc : β) : Async β := do - if let some chunk ← incoming.recv none then + if let some chunk ← incoming.recv then match ← step chunk acc with | .done res => return res | .yield res => loop incoming res @@ -388,6 +404,7 @@ private def collapseForSend (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool) : BaseIO (Except IO.Error (Option Chunk)) := do + outgoing.state.atomically do Channel.pruneFinishedWaiters let st ← get @@ -410,10 +427,13 @@ private def collapseForSend set { st with pendingIncompleteChunk := none } return .ok (some merged) --- Returns `some true` = delivered directly, `some false` = consumer race lost (retry), --- `none` = producer installed, caller must await `done`. -private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do +/- +Returns `some true` = delivered directly, `some false` = consumer race lost (retry), +`none` = producer installed, caller must await `done`. +-/ +private partial def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do let done ← IO.Promise.new + while true do let result : Except IO.Error (Option Bool) ← outgoing.state.atomically do Channel.pruneFinishedWaiters @@ -424,6 +444,7 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do if let some consumer := st.pendingConsumer then let success ← consumer.resolve (some chunk) + if success then set { st with @@ -432,7 +453,6 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do } return .ok (some true) else - -- Consumer's selector race was lost; clear the stale entry and retry. 
set { st with pendingConsumer := none } return .ok (some false) else if st.pendingProducer.isSome then @@ -447,11 +467,8 @@ private def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do | .ok (some true) => return () | .ok (some false) => - -- Retry immediately; no sleep needed, no producer installed yet. - pure () + send' outgoing chunk | .ok none => - -- Producer is installed; block until the consumer signals via `done`. - -- `done` resolves with `true` when consumed, `false` when the channel closes. match ← await done.result? with | some true => return () | _ => throw (IO.Error.userError "channel closed") @@ -465,52 +482,51 @@ If `incomplete := false`, any buffered incomplete pieces are collapsed with this single merged chunk is sent. -/ def send (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do - if chunk.data.isEmpty ∧ chunk.extensions.isEmpty then - return - match (← collapseForSend outgoing chunk incomplete) with - | .error err => - throw err - | .ok none => - pure () - | .ok (some toSend) => - send' outgoing toSend - -/-- Alias for `send`. -/ -def writeChunk (outgoing : Outgoing) (chunk : Chunk) : Async Unit := - outgoing.send chunk + | .error err => throw err + | .ok none => pure () + | .ok (some toSend) => send' outgoing toSend -/-- Closes the channel. -/ +/-- +Closes the channel. +-/ def close (outgoing : Outgoing) : Async Unit := outgoing.state.atomically do Channel.close' -/-- Checks whether the channel is closed. -/ +/-- +Checks whether the channel is closed. +-/ @[always_inline, inline] def isClosed (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do return (← get).closed -/-- Returns true when a consumer is currently blocked waiting for data. -/ +/-- +Returns `true` when a consumer is currently blocked waiting for data. 
+-/ def hasInterest (outgoing : Outgoing) : Async Bool := outgoing.state.atomically do Channel.pruneFinishedWaiters Channel.hasInterest' -/-- Gets the known size if available. -/ +/-- +Gets the known size if available. +-/ @[always_inline, inline] def getKnownSize (outgoing : Outgoing) : Async (Option Body.Length) := outgoing.state.atomically do return (← get).knownSize -/-- Sets known size metadata. -/ +/-- +Sets known size metadata. +-/ @[always_inline, inline] def setKnownSize (outgoing : Outgoing) (size : Option Body.Length) : Async Unit := outgoing.state.atomically do modify fun st => { st with knownSize := size } open Internal.IO.Async in - /-- Creates a selector that resolves when consumer interest is present. Returns `true` when a consumer is waiting, `false` when the channel closes first. @@ -557,12 +573,16 @@ end Outgoing Use these only in HTTP internals where body direction must be adapted. -/ namespace Internal -/-- Reinterprets the receive-side handle as a send-side handle over the same channel. -/ +/-- +Reinterprets the receive-side handle as a send-side handle over the same channel. +-/ @[always_inline, inline] def incomingToOutgoing (incoming : Incoming) : Outgoing := { state := incoming.state } -/-- Reinterprets the send-side handle as a receive-side handle over the same channel. -/ +/-- +Reinterprets the send-side handle as a receive-side handle over the same channel. 
+-/ @[always_inline, inline] def outgoingToIncoming (outgoing : Outgoing) : Incoming := { state := outgoing.state } diff --git a/src/Std/Internal/Http/Data/Body/Writer.lean b/src/Std/Internal/Http/Data/Body/Writer.lean index 6ac86272dc49..064e09771a9e 100644 --- a/src/Std/Internal/Http/Data/Body/Writer.lean +++ b/src/Std/Internal/Http/Data/Body/Writer.lean @@ -208,9 +208,9 @@ instance : Writer AnyBody where instance : Reader AnyBody where recv - | .outgoing body, count => Reader.recv body count - | .full body, count => Reader.recv body count - | .empty body, count => Reader.recv body count + | .outgoing body => Reader.recv body + | .full body => Reader.recv body + | .empty body => Reader.recv body close | .outgoing body => Reader.close body | .full body => Reader.close body diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 39abadf09ab7..6b56f2779319 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -13,7 +13,7 @@ def channelSendRecv : Async Unit := do let chunk := Chunk.ofByteArray "hello".toUTF8 let sendTask ← async (t := AsyncTask) <| outgoing.send chunk - let result ← incoming.recv none + let result ← incoming.recv assert! result.isSome assert! result.get!.data == "hello".toUTF8 @@ -68,7 +68,7 @@ def channelClose : Async Unit := do def channelRecvAfterClose : Async Unit := do let (outgoing, incoming) ← Body.mkChannel outgoing.close - let result ← incoming.recv none + let result ← incoming.recv assert! result.isNone #eval channelRecvAfterClose.block @@ -99,7 +99,7 @@ def channelExtensions : Async Unit := do let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some (Chunk.ExtensionValue.ofString! "value"))] : Chunk } let sendTask ← async (t := AsyncTask) <| outgoing.send chunk - let result ← incoming.recv none + let result ← incoming.recv assert! result.isSome assert! 
result.get!.extensions.size == 1 @@ -125,7 +125,7 @@ def channelKnownSizeDecreases : Async Unit := do outgoing.setKnownSize (some (.fixed 5)) let sendTask ← async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "hello".toUTF8) - let _ ← incoming.recv none + let _ ← incoming.recv await sendTask let size ← incoming.getKnownSize @@ -152,7 +152,7 @@ def channelSingleProducerRule : Async Unit := do pure true assert! send2Failed - let first ← incoming.recv none + let first ← incoming.recv assert! first.isSome assert! first.get!.data == "one".toUTF8 @@ -165,7 +165,7 @@ def channelSingleProducerRule : Async Unit := do def channelSingleConsumerRule : Async Unit := do let (outgoing, incoming) ← Body.mkChannel - let recv1 ← async (t := AsyncTask) <| incoming.recv none + let recv1 ← async (t := AsyncTask) <| incoming.recv let hasInterest ← Selectable.one #[ .case outgoing.interestSelector pure @@ -174,7 +174,7 @@ def channelSingleConsumerRule : Async Unit := do let recv2Failed ← try - let _ ← incoming.recv none + let _ ← incoming.recv pure false catch _ => pure true @@ -196,7 +196,7 @@ def channelHasInterest : Async Unit := do let (outgoing, incoming) ← Body.mkChannel assert! !(← outgoing.hasInterest) - let recvTask ← async (t := AsyncTask) <| incoming.recv none + let recvTask ← async (t := AsyncTask) <| incoming.recv let hasInterest ← Selectable.one #[ .case outgoing.interestSelector pure @@ -234,8 +234,8 @@ def channelInterestSelectorClose : Async Unit := do def fullRecvConsumesOnce : Async Unit := do let full ← Body.Full.ofUTF8String "hello" - let first ← full.recv none - let second ← full.recv none + let first ← full.recv + let second ← full.recv assert! first.isSome assert! first.get!.data == "hello".toUTF8 @@ -318,7 +318,7 @@ def emptyWriterSendFails : Async Unit := do /-! 
## Request.Builder body tests -/ private def recvBuiltBody (body : Body.Full) : Async (Option Chunk) := - body.recv none + body.recv private def emptyBodyKnownSize (body : Body.Empty) : Async (Option Body.Length) := Writer.getKnownSize body From aa09ab0cd9ecb1027a016c7a3e7d404b0202c500 Mon Sep 17 00:00:00 2001 From: Sofia Rodrigues Date: Fri, 27 Feb 2026 14:52:16 -0300 Subject: [PATCH 44/44] fix: function name --- src/Std/Internal/Http/Data/Body/Full.lean | 2 +- tests/lean/run/async_http_body.lean | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Std/Internal/Http/Data/Body/Full.lean b/src/Std/Internal/Http/Data/Body/Full.lean index 0c33df8c2bbe..8979dca81313 100644 --- a/src/Std/Internal/Http/Data/Body/Full.lean +++ b/src/Std/Internal/Http/Data/Body/Full.lean @@ -71,7 +71,7 @@ def ofByteArray (data : ByteArray) : Async Full := do /-- Creates a `Full` body from a `String`. -/ -def ofUTF8String (data : String) : Async Full := do +def ofString (data : String) : Async Full := do let state ← Mutex.new (some data.toUTF8) return { state } diff --git a/tests/lean/run/async_http_body.lean b/tests/lean/run/async_http_body.lean index 6b56f2779319..cc58f2045dcd 100644 --- a/tests/lean/run/async_http_body.lean +++ b/tests/lean/run/async_http_body.lean @@ -233,7 +233,7 @@ def channelInterestSelectorClose : Async Unit := do -- Test Full.recv returns content once then EOF def fullRecvConsumesOnce : Async Unit := do - let full ← Body.Full.ofUTF8String "hello" + let full ← Body.Full.ofString "hello" let first ← full.recv let second ← full.recv @@ -260,7 +260,7 @@ def fullKnownSizeLifecycle : Async Unit := do -- Test Full.close discards remaining content def fullClose : Async Unit := do - let full ← Body.Full.ofUTF8String "bye" + let full ← Body.Full.ofString "bye" assert! !(← full.isClosed) full.close assert! 
(← full.isClosed) @@ -271,7 +271,7 @@ def fullClose : Async Unit := do -- Test Full interest API always reports no consumer interest def fullInterest : Async Unit := do - let full ← Body.Full.ofUTF8String "x" + let full ← Body.Full.ofString "x" assert! !(← full.hasInterest) let interested ← Selectable.one #[ .case full.interestSelector pure