
Commit db010d1

Merge pull request #376 from rpmoore/in_cache_fix
Adding a regression test and fix to validate that items already in ca…
2 parents: 375c18d + 8e29ad4
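The change addresses recovery of bulk PUT jobs: when a job is resumed, a chunk may contain objects the server already holds in cache, and those should not be transferred again. As a point of reference (not part of the commit), here is a minimal sketch of checking a job's in-cache state with the same SpectraS3 calls the new regression test uses; the class and method names below are illustrative.

import com.spectralogic.ds3client.Ds3Client;
import com.spectralogic.ds3client.commands.spectrads3.GetJobSpectraS3Request;
import com.spectralogic.ds3client.commands.spectrads3.GetJobSpectraS3Response;
import com.spectralogic.ds3client.models.BulkObject;
import com.spectralogic.ds3client.models.Objects;

import java.io.IOException;
import java.util.UUID;

public final class InCacheStatusSketch {
    // Illustrative helper (not in the commit): walk a job's master object list
    // and report which objects the server already holds in cache.
    public static void printInCacheStatus(final Ds3Client client, final UUID jobId) throws IOException {
        final GetJobSpectraS3Response response =
                client.getJobSpectraS3(new GetJobSpectraS3Request(jobId));
        for (final Objects chunk : response.getMasterObjectListResult().getObjects()) {
            for (final BulkObject obj : chunk.getObjects()) {
                System.out.println(obj.getName() + " inCache=" + obj.getInCache());
            }
        }
    }
}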

File tree

4 files changed: +78 / -15 lines


README.md

Lines changed: 3 additions & 3 deletions
@@ -41,7 +41,7 @@ The SDK can also be included directly into a Maven or Gradle build. There is als
 <dependency>
     <groupId>com.spectralogic.ds3</groupId>
     <artifactId>ds3-sdk</artifactId>
-    <version>3.2.7</version>
+    <version>3.2.8</version>
     <!-- <classifier>all</classifier> -->
 </dependency>
 ...
@@ -64,8 +64,8 @@ repositories {

 dependencies {
 ...
-    compile 'com.spectralogic.ds3:ds3-sdk:3.2.7'
-    // compile 'com.spectralogic.ds3:ds3-sdk:3.2.7:all'
+    compile 'com.spectralogic.ds3:ds3-sdk:3.2.8'
+    // compile 'com.spectralogic.ds3:ds3-sdk:3.2.8:all'
 ...
 }

build.gradle

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@

 allprojects {
     group = 'com.spectralogic.ds3'
-    version = '3.2.7'
+    version = '3.2.8'
 }

 subprojects {

ds3-sdk-integration/src/test/java/com/spectralogic/ds3client/integration/Regression_Test.java

Lines changed: 67 additions & 10 deletions
@@ -18,30 +18,39 @@
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.spectralogic.ds3client.Ds3Client;
-import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Request;
-import com.spectralogic.ds3client.commands.spectrads3.CancelJobSpectraS3Response;
-import com.spectralogic.ds3client.commands.spectrads3.GetJobChunksReadyForClientProcessingSpectraS3Request;
+import com.spectralogic.ds3client.commands.PutObjectRequest;
+import com.spectralogic.ds3client.commands.PutObjectResponse;
+import com.spectralogic.ds3client.commands.spectrads3.*;
 import com.spectralogic.ds3client.helpers.Ds3ClientHelpers;
+import com.spectralogic.ds3client.helpers.JobRecoveryException;
 import com.spectralogic.ds3client.integration.test.helpers.TempStorageIds;
 import com.spectralogic.ds3client.integration.test.helpers.TempStorageUtil;
+import com.spectralogic.ds3client.models.BulkObject;
 import com.spectralogic.ds3client.models.ChecksumType;
 import com.spectralogic.ds3client.models.Contents;
+import com.spectralogic.ds3client.models.Objects;
 import com.spectralogic.ds3client.models.bulk.Ds3Object;
 import com.spectralogic.ds3client.networking.FailedRequestException;
+import com.spectralogic.ds3client.utils.ResourceUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.IOException;
+import java.net.URISyntaxException;
 import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
 import java.util.Collections;
 import java.util.List;
 import java.util.UUID;

-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.notNullValue;
+import static com.spectralogic.ds3client.integration.Util.RESOURCE_BASE_NAME;
+import static com.spectralogic.ds3client.integration.Util.deleteAllContents;
+import static org.hamcrest.CoreMatchers.*;
 import static org.junit.Assert.*;

 public class Regression_Test {
@@ -95,7 +104,7 @@ public void testMarkerWithSpaces() throws IOException {
                 .cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
             assertEquals(204, cancelJobResponse.getStatusCode());
         } finally {
-            Util.deleteAllContents(client, bucketName);
+            deleteAllContents(client, bucketName);
         }
     }

@@ -131,7 +140,7 @@ public void testPrefixWithSpaces() throws IOException {
                 .cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
             assertEquals(204, cancelJobResponse.getStatusCode());
         } finally {
-            Util.deleteAllContents(client, bucketName);
+            deleteAllContents(client, bucketName);
         }
     }

@@ -180,7 +189,7 @@ public void testPrefixForDirectoriesWithSpaces() throws IOException {
                 .cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
             assertEquals(204, cancelJobResponse.getStatusCode());
         } finally {
-            Util.deleteAllContents(client, bucketName);
+            deleteAllContents(client, bucketName);
         }
     }

@@ -228,7 +237,7 @@ public void testPrefixForNestedDirectories() throws IOException {
                 .cancelJobSpectraS3(new CancelJobSpectraS3Request(putJob.getJobId().toString()));
             assertEquals(204, cancelJobResponse.getStatusCode());
         } finally {
-            Util.deleteAllContents(client, bucketName);
+            deleteAllContents(client, bucketName);
        }
     }

@@ -261,8 +270,56 @@ public SeekableByteChannel buildChannel(final String key) throws IOException {
                 }
             });
         } finally {
-            Util.deleteAllContents(client, bucketName);
+            deleteAllContents(client, bucketName);
         }
+    }
+
+    @Test
+    public void testRecoverWriteJobWithHelper() throws IOException, JobRecoveryException, URISyntaxException {
+        final String bucketName = "test_recover_write_job_bucket";
+        final String book1 = "beowulf.txt";
+        final String book2 = "ulysses.txt";
+
+        try {
+            HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

+            final Path objPath1 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book1);
+            final Path objPath2 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book2);
+            final Ds3Object obj1 = new Ds3Object(book1, Files.size(objPath1));
+            final Ds3Object obj2 = new Ds3Object(book2, Files.size(objPath2));
+
+            final Ds3ClientHelpers.Job job = Ds3ClientHelpers.wrap(client).startWriteJob(bucketName, Lists.newArrayList(obj1, obj2));
+
+            final PutObjectResponse putResponse1 = client.putObject(new PutObjectRequest(
+                    job.getBucketName(),
+                    book1,
+                    new ResourceObjectPutter(RESOURCE_BASE_NAME).buildChannel(book1),
+                    job.getJobId().toString(),
+                    0,
+                    Files.size(objPath1)));
+            assertThat(putResponse1, is(notNullValue()));
+            assertThat(putResponse1.getStatusCode(), is(equalTo(200)));
+
+            // Interruption...
+            final Ds3ClientHelpers.Job recoverJob = HELPERS.recoverWriteJob(job.getJobId());
+
+            recoverJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
+                @Override
+                public SeekableByteChannel buildChannel(final String key) throws IOException {
+                    return Files.newByteChannel(objPath2, StandardOpenOption.READ);
+                }
+            });
+
+            final GetJobSpectraS3Response finishedJob = client.getJobSpectraS3(new GetJobSpectraS3Request(job.getJobId()));
+
+            for (final Objects objects : finishedJob.getMasterObjectListResult().getObjects()) {
+                for (final BulkObject bulkObject : objects.getObjects()) {
+                    assertTrue(bulkObject.getInCache());
+                }
+            }
+
+        } finally {
+            deleteAllContents(client, bucketName);
+        }
     }
 }
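The new testRecoverWriteJobWithHelper exercises the client-side flow end to end: start a write job, put the first object directly, recover the job by id, let the helper transfer the remainder, and then assert that every BulkObject in the job's master object list reports getInCache() as true. Below is a stripped-down sketch of that recovery pattern, assuming a configured Ds3Client and an existing bucket; the class name, method name, and sourceDir parameter are illustrative, not from the commit.

import com.spectralogic.ds3client.Ds3Client;
import com.spectralogic.ds3client.helpers.Ds3ClientHelpers;
import com.spectralogic.ds3client.helpers.JobRecoveryException;
import com.spectralogic.ds3client.models.bulk.Ds3Object;

import java.io.IOException;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;

public final class RecoverWriteJobSketch {
    // Illustrative only: resume an interrupted bulk put by job id and let the
    // helper stream whatever is left; with this fix, objects already in cache
    // are skipped instead of being sent again.
    public static void resume(final Ds3Client client,
                              final String bucketName,
                              final List<Ds3Object> objects,
                              final Path sourceDir) throws IOException, JobRecoveryException {
        final Ds3ClientHelpers helpers = Ds3ClientHelpers.wrap(client);
        final Ds3ClientHelpers.Job job = helpers.startWriteJob(bucketName, objects);

        // ... the transfer is interrupted here; only some objects reached cache ...

        final Ds3ClientHelpers.Job recovered = helpers.recoverWriteJob(job.getJobId());
        recovered.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(sourceDir.resolve(key), StandardOpenOption.READ);
            }
        });
    }
}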

ds3-sdk/src/main/java/com/spectralogic/ds3client/helpers/strategy/PutStreamerStrategy.java

Lines changed: 7 additions & 1 deletion
@@ -16,6 +16,7 @@
 package com.spectralogic.ds3client.helpers.strategy;

 import com.google.common.base.Function;
+import com.google.common.base.Predicate;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.ImmutableMap;
 import com.spectralogic.ds3client.Ds3Client;
@@ -60,7 +61,12 @@ public Iterable<JobPart> getWork() throws IOException, InterruptedException {
         final Objects nextChunk = allocateChunk(filteredChunkIterator.next());

         LOG.debug("Allocating chunk: {}", nextChunk.getChunkId().toString());
-        return FluentIterable.from(nextChunk.getObjects()).transform(new Function<BulkObject, JobPart>() {
+        return FluentIterable.from(nextChunk.getObjects()).filter(new Predicate<BulkObject>() {
+            @Override
+            public boolean apply(@Nullable final BulkObject input) {
+                return !input.getInCache();
+            }
+        }).transform(new Function<BulkObject, JobPart>() {
             @Nullable
             @Override
             public JobPart apply(@Nullable final BulkObject input) {
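This hunk is the actual fix: getWork() now filters the chunk's objects through a Predicate that drops anything already reported as in cache before transforming the remainder into JobParts, so a recovered job skips data the server already has. Here is a self-contained sketch of the same Guava filter-then-transform shape, using a simplified stand-in type rather than the SDK's BulkObject/JobPart (Item and all names below are illustrative):

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;

import java.util.Arrays;
import java.util.List;

public final class InCacheFilterSketch {
    // Simplified stand-in for BulkObject: a name plus an "already in cache" flag.
    static final class Item {
        final String name;
        final boolean inCache;
        Item(final String name, final boolean inCache) {
            this.name = name;
            this.inCache = inCache;
        }
    }

    public static void main(final String[] args) {
        final List<Item> chunk = Arrays.asList(
                new Item("beowulf.txt", true),    // already in cache: should be skipped
                new Item("ulysses.txt", false));  // still needs to be sent

        // Same shape as the fixed getWork(): drop in-cache items, then map the rest
        // to the unit of work (here just the object name).
        final Iterable<String> toSend = FluentIterable.from(chunk)
                .filter(new Predicate<Item>() {
                    @Override
                    public boolean apply(final Item input) {
                        return !input.inCache;
                    }
                })
                .transform(new Function<Item, String>() {
                    @Override
                    public String apply(final Item input) {
                        return input.name;
                    }
                });

        for (final String name : toSend) {
            System.out.println(name); // prints only "ulysses.txt"
        }
    }
}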
