|
1 | 1 | (ns aleph.http.multipart
|
2 | 2 | (:require
|
3 |
| - [clojure.core :as cc] |
4 |
| - [byte-streams :as bs] |
5 |
| - [aleph.http.encoding :refer [encode]] |
6 |
| - [aleph.netty :as netty]) |
| 3 | + [clojure.core :as cc] |
| 4 | + [byte-streams :as bs] |
| 5 | + [aleph.http.encoding :refer [encode]] |
| 6 | + [aleph.http.core :as http-core] |
| 7 | + [aleph.netty :as netty] |
| 8 | + [manifold.stream :as s] |
| 9 | + [clojure.tools.logging :as log] |
| 10 | + [manifold.deferred :as d]) |
7 | 11 | (:import
|
8 |
| - [java.util |
9 |
| - Locale] |
10 |
| - [java.io |
11 |
| - File] |
12 |
| - [java.nio |
13 |
| - ByteBuffer] |
14 |
| - [java.nio.charset |
15 |
| - Charset] |
16 |
| - [java.net |
17 |
| - URLConnection] |
18 |
| - [io.netty.util.internal |
19 |
| - ThreadLocalRandom] |
20 |
| - [io.netty.handler.codec.http |
21 |
| - DefaultHttpRequest |
22 |
| - FullHttpRequest |
23 |
| - HttpConstants] |
24 |
| - [io.netty.handler.codec.http.multipart |
25 |
| - HttpPostRequestEncoder |
26 |
| - MemoryAttribute])) |
| 12 | + [java.util |
| 13 | + Locale] |
| 14 | + [java.io |
| 15 | + File] |
| 16 | + [java.nio |
| 17 | + ByteBuffer] |
| 18 | + [java.nio.charset |
| 19 | + Charset] |
| 20 | + [java.net |
| 21 | + URLConnection] |
| 22 | + [io.netty.util.internal |
| 23 | + ThreadLocalRandom] |
| 24 | + [io.netty.handler.codec.http |
| 25 | + DefaultHttpContent |
| 26 | + DefaultHttpRequest |
| 27 | + FullHttpRequest |
| 28 | + HttpConstants] |
| 29 | + [io.netty.handler.codec.http.multipart |
| 30 | + Attribute |
| 31 | + MemoryAttribute |
| 32 | + FileUpload |
| 33 | + HttpDataFactory |
| 34 | + DefaultHttpDataFactory |
| 35 | + HttpPostRequestDecoder |
| 36 | + HttpPostRequestEncoder |
| 37 | + InterfaceHttpData |
| 38 | + InterfaceHttpData$HttpDataType])) |
27 | 39 |
|
28 | 40 | (defn boundary []
|
29 | 41 | (-> (ThreadLocalRandom/current) .nextLong Long/toHexString .toLowerCase))
|
|
146 | 158 | (.addBodyHttpData encoder attr))))
|
147 | 159 | (let [req' (.finalizeRequest encoder)]
|
148 | 160 | [req' (when (.isChunked encoder) encoder)])))
|
| 161 | + |
;; Converts a decoded Netty multipart part into a Clojure map.
;; Dispatches on the part's HttpDataType (plain attribute vs. file upload).
(defmulti http-data->map
  #(.getHttpDataType ^InterfaceHttpData %))
| 165 | + |
;; A form field (non-file) part. Netty exposes its value as a decoded
;; string, so :content is text and all file-related keys are nil.
(defmethod http-data->map InterfaceHttpData$HttpDataType/Attribute
  [^Attribute attr]
  (let [value (.getValue attr)]
    {:part-name         (.getName attr)
     :content           value
     :name              nil
     :charset           (.toString (.getCharset attr))
     :mime-type         nil
     :transfer-encoding nil
     :memory?           (.isInMemory attr)
     :file?             false
     :file              nil
     ;; character count of the decoded value, matching the original's
     ;; (count content) — not necessarily the byte length
     :size              (count value)}))
| 179 | + |
;; A file-upload part. When the payload fits within the data factory's
;; memory limit, :content is an InputStream over the retained buffer;
;; otherwise the data lives on disk in the temp file exposed under :file.
(defmethod http-data->map InterfaceHttpData$HttpDataType/FileUpload
  [^FileUpload upload]
  (let [in-memory? (.isInMemory upload)]
    {:part-name         (.getName upload)
     :content           (when in-memory?
                          (-> upload .content netty/acquire bs/to-input-stream))
     :name              (.getFilename upload)
     :charset           (.toString (.getCharset upload))
     :mime-type         (.getContentType upload)
     :transfer-encoding (.getContentTransferEncoding upload)
     :memory?           in-memory?
     :file?             true
     :file              (when-not in-memory? (.getFile upload))
     :size              (.length upload)}))
| 194 | + |
;; Drains every part the decoder has fully decoded so far, converting
;; each to a map and pushing it onto the `parts` stream.
(defn- read-attributes [^HttpPostRequestDecoder decoder parts]
  (loop []
    (when (.hasNext decoder)
      (s/put! parts (http-data->map (.next decoder)))
      (recur))))
| 198 | + |
(defn decode-request
  "Takes a ring request and returns a manifold stream which yields
   parts of the multipart/form-data encoded body. In case the size of
   a part content exceeds `:memory-limit` limit (16KB by default),
   corresponding payload would be written to a temp file. Check `:memory?`
   flag to know whether content might be read directly from `:content` or
   should be fetched from the file specified in `:file`.

   Note, that if your handler works with multipart requests only,
   it's better to set `:raw-stream?` to `true` to avoid additional
   input stream coercion."
  ([req] (decode-request req {}))
  ([{:keys [body] :as req}
    {:keys [body-buffer-size
            memory-limit]
     :or {body-buffer-size 65536
          memory-limit DefaultHttpDataFactory/MINSIZE}}]
   ;; accept either a raw-stream body (already a stream of ByteBufs)
   ;; or any other body coercible to a ByteBuf stream
   (let [body (if (s/stream? body)
               body
               (netty/to-byte-buf-stream body body-buffer-size))
         ;; guards against double-destroying the decoder if the close
         ;; callback ever fires more than once
         destroyed? (atom false)
         req' (http-core/ring-request->netty-request req)
         ;; parts larger than memory-limit spill to temp files
         factory (DefaultHttpDataFactory. (long memory-limit))
         decoder (HttpPostRequestDecoder. factory req')
         parts (s/stream)]

     ;; on each HttpContent chunk, put it into the decoder
     ;; and resume our attempts to get the next attribute available
     (s/connect-via
      body
      (fn [chunk]
        (let [content (DefaultHttpContent. chunk)]
          (.offer decoder content)
          (read-attributes decoder parts)
          ;; note, that releasing chunk right here relies on
          ;; the internals of the decoder. in case those
          ;; internal are changed in future, this flow of
          ;; manipulations should be also reconsidered
          (netty/release chunk)
          ;; always report the chunk as consumed so upstream keeps feeding
          (d/success-deferred true)))
      parts)

     ;; when the consumer closes the parts stream, tear down the decoder
     ;; (NOTE(review): presumably .destroy also releases buffers and
     ;; removes any temp files the factory created — confirm against the
     ;; Netty HttpPostRequestDecoder docs)
     (s/on-closed
      parts
      (fn []
        (when (compare-and-set! destroyed? false true)
          (try
            (.destroy decoder)
            (catch Exception e
              (log/warn e "exception when cleaning up multipart decoder"))))))

     parts)))
0 commit comments