 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.http.HttpBodyTracer;
-import org.elasticsearch.http.HttpHandlingSettings;
 import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.http.HttpTransportSettings;
 import org.elasticsearch.plugins.ActionPlugin;
 @ESIntegTestCase.ClusterScope(numDataNodes = 1)
 public class Netty4IncrementalRequestHandlingIT extends ESNetty4IntegTestCase {

+    private static final int MAX_CONTENT_LENGTH = ByteSizeUnit.MB.toIntBytes(50);
+
     @Override
     protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
         Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
-        builder.put(HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), new ByteSizeValue(50, ByteSizeUnit.MB));
+
+        builder.put(
+            HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(),
+            new ByteSizeValue(MAX_CONTENT_LENGTH, ByteSizeUnit.BYTES)
+        );
         return builder.build();
     }

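For context on the new constant: ByteSizeUnit.MB.toIntBytes(50) and new ByteSizeValue(50, ByteSizeUnit.MB) express the same 50 MB limit, but keeping it as a plain int lets the tests reuse it directly as an upper bound for randomIntBetween(...). A minimal standalone sketch of that equivalence, assuming the usual org.elasticsearch.common.unit locations for these classes (illustrative only, not part of the test):

// Illustrative sketch only: the byte-based and MB-based forms of the 50 MB limit
// resolve to the same value. The class name below is made up for the example.
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

class MaxContentLengthEquivalence {
    public static void main(String[] args) {
        int maxContentLength = ByteSizeUnit.MB.toIntBytes(50); // 52_428_800 bytes
        ByteSizeValue asBytes = new ByteSizeValue(maxContentLength, ByteSizeUnit.BYTES);
        ByteSizeValue asMegabytes = new ByteSizeValue(50, ByteSizeUnit.MB);
        // Both describe the same limit; the int form doubles as a bound for randomIntBetween(...).
        System.out.println(asBytes.getBytes() == asMegabytes.getBytes()); // prints "true"
    }
}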
@@ -135,7 +140,7 @@ public void testReceiveAllChunks() throws Exception {
                 var opaqueId = opaqueId(reqNo);

                 // this dataset will be compared with one on server side
-                var dataSize = randomIntBetween(1024, maxContentLength());
+                var dataSize = randomIntBetween(1024, MAX_CONTENT_LENGTH);
                 var sendData = Unpooled.wrappedBuffer(randomByteArrayOfLength(dataSize));
                 sendData.retain();
                 ctx.clientChannel.writeAndFlush(fullHttpRequest(opaqueId, sendData));
@@ -243,7 +248,7 @@ public void testServerExceptionMidStream() throws Exception {
     public void testClientBackpressure() throws Exception {
         try (var ctx = setupClientCtx()) {
             var opaqueId = opaqueId(0);
-            var payloadSize = maxContentLength();
+            var payloadSize = MAX_CONTENT_LENGTH;
             var totalParts = 10;
             var partSize = payloadSize / totalParts;
             ctx.clientChannel.writeAndFlush(httpRequest(opaqueId, payloadSize));
@@ -285,7 +290,7 @@ public void test100Continue() throws Exception {
         try (var ctx = setupClientCtx()) {
             for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) {
                 var id = opaqueId(reqNo);
-                var acceptableContentLength = randomIntBetween(0, maxContentLength());
+                var acceptableContentLength = randomIntBetween(0, MAX_CONTENT_LENGTH);

                 // send request header and await 100-continue
                 var req = httpRequest(id, acceptableContentLength);
@@ -317,7 +322,7 @@ public void test413TooLargeOnExpect100Continue() throws Exception {
         try (var ctx = setupClientCtx()) {
             for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) {
                 var id = opaqueId(reqNo);
-                var oversized = maxContentLength() + 1;
+                var oversized = MAX_CONTENT_LENGTH + 1;

                 // send request header and await 413 too large
                 var req = httpRequest(id, oversized);
@@ -333,32 +338,28 @@ public void test413TooLargeOnExpect100Continue() throws Exception {
         }
     }

-    // ensures that oversized chunked encoded request has no limits at http layer
-    // rest handler is responsible for oversized requests
-    public void testOversizedChunkedEncodingNoLimits() throws Exception {
+    // ensures that an oversized chunked-encoded request is limited to maxContentLength and returns 413
+    public void testOversizedChunkedEncoding() throws Exception {
         try (var ctx = setupClientCtx()) {
-            for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) {
-                var id = opaqueId(reqNo);
-                var contentSize = maxContentLength() + 1;
-                var content = randomByteArrayOfLength(contentSize);
-                var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content));
-                var chunkedIs = new ChunkedStream(is);
-                var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT);
-                var req = httpRequest(id, 0);
-                HttpUtil.setTransferEncodingChunked(req, true);
-
-                ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler());
-                ctx.clientChannel.writeAndFlush(req);
-                ctx.clientChannel.writeAndFlush(httpChunkedIs);
-                var handler = ctx.awaitRestChannelAccepted(id);
-                var consumed = handler.readAllBytes();
-                assertEquals(contentSize, consumed);
-                handler.sendResponse(new RestResponse(RestStatus.OK, ""));
-
-                var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue);
-                assertEquals(HttpResponseStatus.OK, resp.status());
-                resp.release();
-            }
+            var id = opaqueId(0);
+            var contentSize = MAX_CONTENT_LENGTH + 1;
+            var content = randomByteArrayOfLength(contentSize);
+            var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content));
+            var chunkedIs = new ChunkedStream(is);
+            var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT);
+            var req = httpRequest(id, 0);
+            HttpUtil.setTransferEncodingChunked(req, true);
+
+            ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler());
+            ctx.clientChannel.writeAndFlush(req);
+            ctx.clientChannel.writeAndFlush(httpChunkedIs);
+            var handler = ctx.awaitRestChannelAccepted(id);
+            var consumed = handler.readAllBytes();
+            assertTrue(consumed <= MAX_CONTENT_LENGTH);
+
+            var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue);
+            assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status());
+            resp.release();
         }
     }

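For readers less familiar with the Netty pieces this test leans on: HttpUtil.setTransferEncodingChunked(req, true) marks the request as Transfer-Encoding: chunked and drops any Content-Length, ChunkedStream slices an InputStream into chunks, and a ChunkedWriteHandler must be in the pipeline to drain the resulting HttpChunkedInput. A hedged, self-contained sketch of that client-side setup follows; the class, method, and channel argument are made up for illustration and are not the test's own helpers:

// Illustrative sketch only: a minimal Netty client pipeline able to send a chunked
// HTTP request with no Content-Length, mirroring the mechanics the test relies on.
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.HttpChunkedInput;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.stream.ChunkedStream;
import io.netty.handler.stream.ChunkedWriteHandler;

class ChunkedRequestSketch {
    // "channel" is assumed to be an already-connected Netty channel with no HTTP
    // handlers installed yet; it stands in for the test's pre-built client channel.
    static void sendChunked(Channel channel, byte[] body) {
        channel.pipeline().addLast(new HttpClientCodec());      // encodes HttpRequest/HttpContent to bytes
        channel.pipeline().addLast(new ChunkedWriteHandler());  // drains ChunkedInput writes chunk by chunk

        HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/");
        HttpUtil.setTransferEncodingChunked(request, true);     // chunked transfer, no Content-Length

        var input = new ChunkedStream(new ByteBufInputStream(Unpooled.wrappedBuffer(body)));
        channel.write(request);
        channel.writeAndFlush(new HttpChunkedInput(input, LastHttpContent.EMPTY_LAST_CONTENT));
    }
}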
@@ -369,7 +370,7 @@ public void testBadRequestReleaseQueuedChunks() throws Exception {
         try (var ctx = setupClientCtx()) {
             for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) {
                 var id = opaqueId(reqNo);
-                var contentSize = randomIntBetween(0, maxContentLength());
+                var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH);
                 var req = httpRequest(id, contentSize);
                 var content = randomContent(contentSize, true);

@@ -405,7 +406,7 @@ public void testHttpClientStats() throws Exception {

             for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) {
                 var id = opaqueId(reqNo);
-                var contentSize = randomIntBetween(0, maxContentLength());
+                var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH);
                 totalBytesSent += contentSize;
                 ctx.clientChannel.writeAndFlush(httpRequest(id, contentSize));
                 ctx.clientChannel.writeAndFlush(randomContent(contentSize, true));
@@ -485,10 +486,6 @@ private void assertHttpBodyLogging(Function<Ctx, Runnable> test) throws Exception {
         }
     }

-    private int maxContentLength() {
-        return HttpHandlingSettings.fromSettings(internalCluster().getInstance(Settings.class)).maxContentLength();
-    }
-
     private String opaqueId(int reqNo) {
         return getTestName() + "-" + reqNo;
     }
@@ -658,14 +655,22 @@ void sendResponse(RestResponse response) {
         int readBytes(int bytes) {
             var consumed = 0;
             if (recvLast == false) {
-                while (consumed < bytes) {
-                    stream.next();
-                    var recvChunk = safePoll(recvChunks);
-                    consumed += recvChunk.chunk.length();
-                    recvChunk.chunk.close();
-                    if (recvChunk.isLast) {
-                        recvLast = true;
-                        break;
+                stream.next();
+                while (consumed < bytes && streamClosed == false) {
+                    try {
+                        var recvChunk = recvChunks.poll(10, TimeUnit.MILLISECONDS);
+                        if (recvChunk != null) {
+                            consumed += recvChunk.chunk.length();
+                            recvChunk.chunk.close();
+                            if (recvChunk.isLast) {
+                                recvLast = true;
+                                break;
+                            }
+                            stream.next();
+                        }
+                    } catch (InterruptedException e) {
+                        Thread.currentThread().interrupt();
+                        throw new AssertionError(e);
                     }
                 }
             }
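The reworked read loop above no longer blocks indefinitely on the chunk queue: when the server cuts an oversized stream short, the remaining chunks may never arrive, so the reader polls with a short timeout and re-checks a closed flag between attempts. A minimal standalone sketch of that bounded-poll pattern, where the Chunk record, the queue, and the flag are made-up stand-ins for the test's recvChunks and streamClosed members:

// Illustrative sketch only: consume up to "bytes" bytes from a chunk queue without
// blocking forever if the producer stops early.
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

class BoundedPollSketch {
    record Chunk(byte[] data, boolean isLast) {}

    static int readUpTo(int bytes, BlockingQueue<Chunk> chunks, AtomicBoolean streamClosed) throws InterruptedException {
        int consumed = 0;
        while (consumed < bytes && streamClosed.get() == false) {
            Chunk chunk = chunks.poll(10, TimeUnit.MILLISECONDS); // short timeout instead of an unbounded wait
            if (chunk == null) {
                continue; // nothing arrived yet; the loop re-checks the closed flag
            }
            consumed += chunk.data().length;
            if (chunk.isLast()) {
                break; // stream completed normally
            }
        }
        return consumed;
    }
}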