@@ -13,8 +13,6 @@
 import com.amazonaws.DnsResolver;
 import com.amazonaws.SdkClientException;
 import com.amazonaws.services.s3.AmazonS3ClientBuilder;
-import com.amazonaws.services.s3.internal.MD5DigestCalculatingInputStream;
-import com.amazonaws.util.Base16;
 import com.sun.net.httpserver.HttpExchange;
 import com.sun.net.httpserver.HttpHandler;

@@ -25,7 +23,9 @@
 import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.OperationPurpose;
+import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.hash.MessageDigests;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
@@ -365,13 +365,12 @@ public void testWriteLargeBlob() throws Exception {
 }
 } else if (s3Request.isUploadPartRequest()) {
 // upload part request
-MD5DigestCalculatingInputStream md5 = new MD5DigestCalculatingInputStream(exchange.getRequestBody());
-BytesReference bytes = Streams.readFully(md5);
+BytesReference bytes = Streams.readFully(exchange.getRequestBody());
 assertThat((long) bytes.length(), anyOf(equalTo(lastPartSize), equalTo(bufferSize.getBytes())));
 assertThat(contentLength, anyOf(equalTo(lastPartSize), equalTo(bufferSize.getBytes())));

 if (countDownUploads.decrementAndGet() % 2 == 0) {
-exchange.getResponseHeaders().add("ETag", Base16.encodeAsString(md5.getMd5Digest()));
+exchange.getResponseHeaders().add("ETag", getBase16MD5Digest(bytes));
 exchange.sendResponseHeaders(HttpStatus.SC_OK, -1);
 exchange.close();
 return;
@@ -463,12 +462,11 @@ public void testWriteLargeBlobStreaming() throws Exception {
 }
 } else if (s3Request.isUploadPartRequest()) {
 // upload part request
-MD5DigestCalculatingInputStream md5 = new MD5DigestCalculatingInputStream(exchange.getRequestBody());
-BytesReference bytes = Streams.readFully(md5);
+BytesReference bytes = Streams.readFully(exchange.getRequestBody());

 if (counterUploads.incrementAndGet() % 2 == 0) {
 bytesReceived.addAndGet(bytes.length());
-exchange.getResponseHeaders().add("ETag", Base16.encodeAsString(md5.getMd5Digest()));
+exchange.getResponseHeaders().add("ETag", getBase16MD5Digest(bytes));
 exchange.sendResponseHeaders(HttpStatus.SC_OK, -1);
 exchange.close();
 return;
@@ -859,6 +857,21 @@ public void testTrimmedLogAndCappedSuppressedErrorOnMultiObjectDeletionException
 }
 }

+private static String getBase16MD5Digest(BytesReference bytesReference) {
+return MessageDigests.toHexString(MessageDigests.digest(bytesReference, MessageDigests.md5()));
+}
+
+public void testGetBase16MD5Digest() {
+// from Wikipedia, see also org.elasticsearch.common.hash.MessageDigestsTests.testMd5
+assertBase16MD5Digest("", "d41d8cd98f00b204e9800998ecf8427e");
+assertBase16MD5Digest("The quick brown fox jumps over the lazy dog", "9e107d9d372bb6826bd81d3542a419d6");
+assertBase16MD5Digest("The quick brown fox jumps over the lazy dog.", "e4d909c290d0fb1ca068ffaddf22cbd0");
+}
+
+private static void assertBase16MD5Digest(String input, String expectedDigestString) {
+assertEquals(expectedDigestString, getBase16MD5Digest(new BytesArray(input)));
+}
+
 @Override
 protected Matcher<Integer> getMaxRetriesMatcher(int maxRetries) {
 // some attempts make meaningful progress and do not count towards the max retry limit
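For reference, the hex-encoded ("Base16") MD5 values asserted in testGetBase16MD5Digest can be reproduced with the plain JDK. The sketch below uses java.security.MessageDigest and java.util.HexFormat (Java 17+) instead of Elasticsearch's MessageDigests utility, so the class and method names are illustrative only, not part of this change.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HexFormat;

public class Md5HexExample {
    // Computes the lowercase hex MD5 digest of the given bytes, the same value
    // the test's getBase16MD5Digest helper returns for an upload part's body.
    static String base16Md5(byte[] bytes) throws NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        return HexFormat.of().formatHex(md5.digest(bytes));
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        // Matches the Wikipedia examples used in testGetBase16MD5Digest above.
        System.out.println(base16Md5("".getBytes(StandardCharsets.UTF_8)));
        // d41d8cd98f00b204e9800998ecf8427e
        System.out.println(base16Md5("The quick brown fox jumps over the lazy dog".getBytes(StandardCharsets.UTF_8)));
        // 9e107d9d372bb6826bd81d3542a419d6
    }
}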