-
Notifications
You must be signed in to change notification settings - Fork 25.6k
Refactor gcp-fixture multipart parser #125828
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
2fb738d
28d4481
4ef1236
5105d4c
0dfddd6
1008194
771e069
4eb8b23
3381864
f8eb315
f3210e4
9714b32
019a04f
b4869f5
47912f5
8de1110
1505b48
b7ef093
905620d
efda0e7
797d27d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -18,7 +18,6 @@ | |
| import org.elasticsearch.common.io.Streams; | ||
| import org.elasticsearch.common.regex.Regex; | ||
| import org.elasticsearch.core.SuppressForbidden; | ||
| import org.elasticsearch.core.Tuple; | ||
| import org.elasticsearch.rest.RestStatus; | ||
| import org.elasticsearch.rest.RestUtils; | ||
| import org.elasticsearch.test.fixture.HttpHeaderParser; | ||
|
|
@@ -27,26 +26,17 @@ | |
| import org.elasticsearch.xcontent.XContentFactory; | ||
| import org.elasticsearch.xcontent.XContentType; | ||
|
|
||
| import java.io.BufferedReader; | ||
| import java.io.IOException; | ||
| import java.io.InputStream; | ||
| import java.io.InputStreamReader; | ||
| import java.net.URLDecoder; | ||
| import java.util.HashMap; | ||
| import java.util.Locale; | ||
| import java.util.Map; | ||
| import java.util.Objects; | ||
| import java.util.Optional; | ||
| import java.util.concurrent.atomic.AtomicInteger; | ||
| import java.util.regex.Matcher; | ||
| import java.util.regex.Pattern; | ||
| import java.util.stream.Collectors; | ||
| import java.util.zip.GZIPInputStream; | ||
|
|
||
| import static fixture.gcs.MockGcsBlobStore.failAndThrow; | ||
| import static java.nio.charset.StandardCharsets.UTF_8; | ||
| import static java.util.stream.Collectors.joining; | ||
| import static org.elasticsearch.core.Strings.format; | ||
|
|
||
| /** | ||
| * Minimal HTTP handler that acts as a Google Cloud Storage compliant server | ||
|
|
@@ -186,26 +176,14 @@ public void handle(final HttpExchange exchange) throws IOException { | |
| exchange.getResponseBody().write(response); | ||
|
|
||
| } else if (Regex.simpleMatch("POST /upload/storage/v1/b/" + bucket + "/*uploadType=multipart*", request)) { | ||
| // Multipart upload | ||
| Optional<Tuple<String, BytesReference>> content = parseMultipartRequestBody(requestBody.streamInput()); | ||
| if (content.isPresent()) { | ||
| final Long ifGenerationMatch = parseOptionalLongParameter(exchange, IF_GENERATION_MATCH); | ||
| final MockGcsBlobStore.BlobVersion newBlobVersion = mockGcsBlobStore.updateBlob( | ||
| content.get().v1(), | ||
| ifGenerationMatch, | ||
| content.get().v2() | ||
| ); | ||
| writeBlobVersionAsJson(exchange, newBlobVersion); | ||
| } else { | ||
| throw new AssertionError( | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I would like to keep the behaviour where an invalid body causes a test failure rather than just an
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Throw AssertionError errors from the MultipartUpload.parseBody now.
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I was sort of hoping to see tests that ensured that this kind of malformed body would throw an IAE from the parser. Throwing |
||
| "Could not read multi-part request to [" | ||
| + request | ||
| + "] with headers [" | ||
| + new HashMap<>(exchange.getRequestHeaders()) | ||
| + "]" | ||
| ); | ||
| } | ||
|
|
||
| final var multipartUpload = MultipartUpload.parseBody(exchange, requestBody.streamInput()); | ||
| final Long ifGenerationMatch = parseOptionalLongParameter(exchange, IF_GENERATION_MATCH); | ||
| final MockGcsBlobStore.BlobVersion newBlobVersion = mockGcsBlobStore.updateBlob( | ||
| multipartUpload.name(), | ||
| ifGenerationMatch, | ||
| multipartUpload.content() | ||
| ); | ||
| writeBlobVersionAsJson(exchange, newBlobVersion); | ||
| } else if (Regex.simpleMatch("POST /upload/storage/v1/b/" + bucket + "/*uploadType=resumable*", request)) { | ||
| // Resumable upload initialization https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload | ||
| final Map<String, String> params = new HashMap<>(); | ||
|
|
@@ -331,81 +309,6 @@ private static String httpServerUrl(final HttpExchange exchange) { | |
| return "http://" + exchange.getRequestHeaders().get("HOST").get(0); | ||
| } | ||
|
|
||
| private static final Pattern NAME_PATTERN = Pattern.compile("\"name\":\"([^\"]*)\""); | ||
|
|
||
| public static Optional<Tuple<String, BytesReference>> parseMultipartRequestBody(final InputStream requestBody) throws IOException { | ||
| Tuple<String, BytesReference> content = null; | ||
| final BytesReference fullRequestBody; | ||
| try (InputStream in = new GZIPInputStream(requestBody)) { | ||
| fullRequestBody = Streams.readFully(in); | ||
| } | ||
| String name = null; | ||
| boolean skippedEmptyLine = false; | ||
| int startPos = 0; | ||
| int endPos = 0; | ||
| while (startPos < fullRequestBody.length()) { | ||
| do { | ||
| endPos = fullRequestBody.indexOf((byte) '\r', endPos + 1); | ||
| } while (endPos >= 0 && fullRequestBody.get(endPos + 1) != '\n'); | ||
| boolean markAndContinue = false; | ||
| final String bucketPrefix = "{\"bucket\":"; | ||
| if (startPos > 0) { | ||
| startPos += 2; | ||
| } | ||
| if (name == null || skippedEmptyLine == false) { | ||
| if ((skippedEmptyLine == false && endPos == startPos) | ||
| || (fullRequestBody.get(startPos) == '-' && fullRequestBody.get(startPos + 1) == '-')) { | ||
| markAndContinue = true; | ||
| } else { | ||
| final String start = fullRequestBody.slice(startPos, Math.min(endPos - startPos, bucketPrefix.length())).utf8ToString(); | ||
| if (start.toLowerCase(Locale.ROOT).startsWith("content")) { | ||
| markAndContinue = true; | ||
| } else if (start.startsWith(bucketPrefix)) { | ||
| markAndContinue = true; | ||
| final String line = fullRequestBody.slice( | ||
| startPos + bucketPrefix.length(), | ||
| endPos - startPos - bucketPrefix.length() | ||
| ).utf8ToString(); | ||
| Matcher matcher = NAME_PATTERN.matcher(line); | ||
| if (matcher.find()) { | ||
| name = matcher.group(1); | ||
| } | ||
| } | ||
| } | ||
| skippedEmptyLine = markAndContinue && endPos == startPos; | ||
| startPos = endPos; | ||
| } else { | ||
| while (isEndOfPart(fullRequestBody, endPos) == false) { | ||
| endPos = fullRequestBody.indexOf((byte) '\r', endPos + 1); | ||
| } | ||
| content = Tuple.tuple(name, fullRequestBody.slice(startPos, endPos - startPos)); | ||
| break; | ||
| } | ||
| } | ||
| if (content == null) { | ||
| final InputStream stream = fullRequestBody.streamInput(); | ||
| logger.warn( | ||
| () -> format( | ||
| "Failed to find multi-part upload in [%s]", | ||
| new BufferedReader(new InputStreamReader(stream)).lines().collect(joining("\n")) | ||
| ) | ||
| ); | ||
| } | ||
| return Optional.ofNullable(content); | ||
| } | ||
|
|
||
| private static final byte[] END_OF_PARTS_MARKER = "\r\n--__END_OF_PART__".getBytes(UTF_8); | ||
|
|
||
| private static boolean isEndOfPart(BytesReference fullRequestBody, int endPos) { | ||
| for (int i = 0; i < END_OF_PARTS_MARKER.length; i++) { | ||
| final byte b = END_OF_PARTS_MARKER[i]; | ||
| if (fullRequestBody.get(endPos + i) != b) { | ||
| return false; | ||
| } | ||
| } | ||
| return true; | ||
| } | ||
|
|
||
| private static String requireHeader(HttpExchange exchange, String headerName) { | ||
| final String headerValue = exchange.getRequestHeaders().getFirst(headerName); | ||
| if (headerValue != null) { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It's legitimate to upload an empty blob comprising a single empty part, isn't it? It certainly is in S3. We should be covering this case in these tests.
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This test explicitly sets the content length to be at least 1 byte long. Empty content is not expected.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It's legitimate to send a 0-byte blob. It will produce two body parts: the JSON metadata and an empty part with headers.
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I think we could do away with that assertion (it might trip in the event someone changes
randomBlobContent() down the track). It's not important for the test. I think the previous assertion (assertThat(content, isPresent());) was only there because the old parseMultipartRequestBody returned Optional.empty() when the parse failed. I don't have strong feelings about it, but given we check that the content equals the uploaded bytes further down, it seems redundant?
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Changed the test to use a minimum size of 0 and removed the content-length assertion in b4869f5. Manually tested with a 0-sized blob; it works.