Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions .github/workflows/gradle-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,11 @@ on: workflow_dispatch
jobs:
build:
runs-on: ubuntu-latest

services:
localstack:
image: localstack/localstack:latest
ports:
- 4566:4566
steps:
- uses: actions/checkout@v3
with:
Expand All @@ -34,4 +38,5 @@ jobs:
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
run: bash ./release.sh
run: ./bin/release.sh
shell: bash
34 changes: 34 additions & 0 deletions .github/workflows/pull-request-verifiy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Copyright 2022 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.

name: Verify Pull Request

on: pull_request
jobs:
build:
runs-on: ubuntu-latest
services:
localstack:
image: localstack/localstack:latest
ports:
- 4566:4566
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up JDK
uses: actions/setup-java@v2
with:
java-version: 11
distribution: 'adopt'

- name: Verify build
run: ./gradlew clean check --stacktrace
shell: bash
12 changes: 10 additions & 2 deletions release.sh → bin/release.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
#!/usr/bin/env sh
# Copyright 2022 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.

set -e

Expand All @@ -19,4 +27,4 @@ export ORG_GRADLE_PROJECT_ossrhPassword="${SONATYPE_PASSWORD}"
-Psigning.gnupg.useLegacyGpg=true \
-Psigning.gnupg.keyName="${GPG_KEY_ID}" \
-Psigning.gnupg.passphrase="${GPG_PASSPHRASE}" \
clean publish -x integrationTest --stacktrace
clean publish --stacktrace
12 changes: 5 additions & 7 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,6 @@ dependencies {
testImplementation "junit:junit:4.12"
testImplementation "org.mockito:mockito-core:2.23.4"
testImplementation "com.tngtech.java:junit-dataprovider:1.13.1"
testImplementation "org.testcontainers:localstack:1.12.5"
testImplementation group: 'org.testcontainers', name: 'testcontainers', version: "1.9.1"
testImplementation group: 'cloud.localstack', name: 'localstack-utils', version: '0.1.15'
testImplementation group: 'org.apache.hadoop', name: 'hadoop-aws', version: '3.1.1'
testImplementation "org.apache.hadoop:hadoop-common:3.1.2"
testImplementation "org.apache.hadoop:hadoop-common:3.1.2:tests"
Expand Down Expand Up @@ -163,8 +160,8 @@ publishing {
maven {
url = "https://oss.sonatype.org/service/local/staging/deploy/maven2/"
credentials {
username project.property("ossrhUsername").toString()
password project.property("ossrhPassword").toString()
username project.hasProperty("ossrhUsername") ? project.property("ossrhUsername").toString() : ""
password project.hasProperty("ossrhPassword") ? project.property("ossrhPassword").toString() : ""
}
}
}
Expand Down Expand Up @@ -208,6 +205,7 @@ signing {

afterEvaluate {
javadoc.dependsOn compileJava
jar.dependsOn test /* , integrationTest */ // remove running of integrationTests for now
shadowJar.dependsOn test, sourcesJar, javadocJar /* , integrationTest */ // remove running of integrationTests for now
check.dependsOn test, integrationTest
jar.dependsOn test, integrationTest
shadowJar.dependsOn test, sourcesJar, javadocJar, integrationTest
}
7 changes: 6 additions & 1 deletion docs/Contributing.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,9 @@ Read [GitHub's pull request documentation](https://help.github.com/articles/abou
## Integration tests

The integration tests use [localstack](https://github.com/localstack/localstack) to emulate cloud services such as S3 and DynamoDB.
To run integration tests locally you need to have [docker](https://docs.docker.com/docker-for-mac/install/) installed.
Localstack runs as a [docker](https://www.docker.com/) container.

Steps to run integration tests locally:
- Install docker [desktop](https://docs.docker.com/get-docker/).
- Install and start localstack following these [steps](https://docs.localstack.cloud/get-started/#localstack-cli).
- Run integration tests either from your IDE or through the gradle CLI `./gradlew clean integrationTest`.
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,6 @@

package com.adobe.s3fs;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import com.adobe.s3fs.common.configuration.FileSystemConfiguration;
import com.adobe.s3fs.common.configuration.HadoopKeyValueConfiguration;
import com.adobe.s3fs.common.context.FileSystemContext;
Expand All @@ -28,52 +24,28 @@
import com.adobe.s3fs.utils.DynamoTable;
import com.adobe.s3fs.utils.ITUtils;
import com.adobe.s3fs.utils.InMemoryMetadataOperationLog;

import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.google.common.base.Functions;
import com.google.common.collect.*;

import com.google.common.collect.FluentIterable;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.collect.Streams;
import junit.framework.AssertionFailedError;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.localstack.LocalStackContainer;

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import org.junit.*;

import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class DynamoDBMetadataStoreIntegrationTest {

@ClassRule
public static Network network = Network.newNetwork();
import static org.junit.Assert.*;

@ClassRule
public static LocalStackContainer localStackContainer = new LocalStackContainer()
.withNetwork(network)
.withServices(LocalStackContainer.Service.DYNAMODB, LocalStackContainer.Service.S3)
.withNetworkAliases("localstack");
public class DynamoDBMetadataStoreIntegrationTest {

@Rule
public DynamoTable dynamoTable = new DynamoTable(ITUtils.amazonDynamoDB(localStackContainer));
public DynamoTable dynamoTable = new DynamoTable(ITUtils.amazonDynamoDB());


private DynamoDBMetadataStore metadataStore;
Expand All @@ -88,7 +60,7 @@ public class DynamoDBMetadataStoreIntegrationTest {

@BeforeClass
public static void beforeClass() {
dynamoDB = ITUtils.amazonDynamoDB(localStackContainer);
dynamoDB = ITUtils.amazonDynamoDB();
}

@Before
Expand All @@ -99,7 +71,7 @@ public void setup() {

ITUtils.mapBucketToTable(configuration, "bucket", dynamoTable.getTable());

ITUtils.configureDynamoAccess(localStackContainer, configuration, "bucket");
ITUtils.configureDynamoAccess(configuration, "bucket");

ITUtils.configureAsyncOperations(configuration, "bucket", "ctx");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,87 +12,57 @@

package com.adobe.s3fs;

import static com.adobe.s3fs.utils.FileSystemStateChecker.checkFileSystemState;
import static com.adobe.s3fs.utils.FileSystemStateChecker.expectedDirectory;
import static com.adobe.s3fs.utils.FileSystemStateChecker.expectedFile;
import static com.adobe.s3fs.utils.OperationLogStateChecker.checkOperationLogState;
import static com.adobe.s3fs.utils.stream.StreamUtils.uncheckedRunnable;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.adobe.s3fs.common.runtime.FileSystemRuntimeFactory;
import com.adobe.s3fs.filesystem.HadoopFileSystemAdapter;
import com.adobe.s3fs.utils.DynamoTable;
import com.adobe.s3fs.utils.ExpectedFSObject;
import com.adobe.s3fs.utils.ITUtils;
import com.adobe.s3fs.utils.S3Bucket;

import com.amazonaws.services.s3.AmazonS3;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.BasicConfigurator;
import org.apache.hadoop.fs.*;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.localstack.LocalStackContainer;

import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static com.adobe.s3fs.utils.FileSystemStateChecker.*;
import static com.adobe.s3fs.utils.OperationLogStateChecker.checkOperationLogState;
import static com.adobe.s3fs.utils.stream.StreamUtils.uncheckedRunnable;
import static org.junit.Assert.*;

public class FileSystemIntegrationTest {

static {
LogManager.getRootLogger().setLevel(Level.INFO);
}

@ClassRule
public static Network network = Network.newNetwork();

@ClassRule
public static LocalStackContainer localStackContainer = new LocalStackContainer()
.withNetwork(network)
.withServices(LocalStackContainer.Service.DYNAMODB, LocalStackContainer.Service.S3)
.withNetworkAliases("localstack");

@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();

@Rule
public DynamoTable dynamoTable1 = new DynamoTable(ITUtils.amazonDynamoDB(localStackContainer));
public DynamoTable dynamoTable1 = new DynamoTable(ITUtils.amazonDynamoDB());

@Rule
public S3Bucket bucket1 = new S3Bucket(ITUtils.amazonS3(localStackContainer));
public S3Bucket bucket1 = new S3Bucket(ITUtils.amazonS3());

@Rule
public S3Bucket operationLogBucket = new S3Bucket(ITUtils.amazonS3(localStackContainer));
public S3Bucket operationLogBucket = new S3Bucket(ITUtils.amazonS3());

private Configuration configuration;

Expand All @@ -102,20 +72,20 @@ public class FileSystemIntegrationTest {

@Before
public void setup() throws IOException {
s3 = ITUtils.amazonS3(localStackContainer);
s3 = ITUtils.amazonS3();

configuration = new Configuration(false);
configuration.setClass("fs.s3.impl", HadoopFileSystemAdapter.class, FileSystem.class);
configuration.setBoolean("fs.s3.impl.disable.cache", true);
ITUtils.configureAsyncOperations(configuration, bucket1.getBucket(), "ctx");

ITUtils.configureDynamoAccess(localStackContainer, configuration, bucket1.getBucket());
ITUtils.configureDynamoAccess(configuration, bucket1.getBucket());
ITUtils.mapBucketToTable(configuration, bucket1.getBucket(), dynamoTable1.getTable());

ITUtils.configureS3OperationLog(configuration, bucket1.getBucket(), operationLogBucket.getBucket());
ITUtils.configureS3OperationLogAccess(localStackContainer, configuration, bucket1.getBucket());
ITUtils.configureS3OperationLogAccess(configuration, bucket1.getBucket());

ITUtils.configureS3AAsUnderlyingFileSystem(localStackContainer, configuration, bucket1.getBucket(), temporaryFolder.getRoot().toString());
ITUtils.configureS3AAsUnderlyingFileSystem(configuration, bucket1.getBucket(), temporaryFolder.getRoot().toString());

ITUtils.configureSuffixCount(configuration, bucket1.getBucket(), 10);

Expand Down
22 changes: 7 additions & 15 deletions src/integrationTest/java/com/adobe/s3fs/S3PrefixListerTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,12 @@
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.*;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.localstack.LocalStackContainer;

import java.io.*;
import java.nio.charset.StandardCharsets;
Expand All @@ -49,22 +50,13 @@

public class S3PrefixListerTest {

@ClassRule public static Network network = Network.newNetwork();

@ClassRule
public static LocalStackContainer localStackContainer =
new LocalStackContainer()
.withNetwork(network)
.withServices(LocalStackContainer.Service.S3)
.withNetworkAliases("localstack");

static {
LogManager.getRootLogger().setLevel(Level.INFO);
}

@Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();

@Rule public S3Bucket bucket1 = new S3Bucket(ITUtils.amazonS3(localStackContainer));
@Rule public S3Bucket bucket1 = new S3Bucket(ITUtils.amazonS3());

private S3PrefixLister s3PrefixLister;

Expand Down Expand Up @@ -119,7 +111,7 @@ private void verifyList(List<String> expected, List<String> actual) {

@Before
public void setup() throws IOException {
s3 = ITUtils.amazonS3(localStackContainer);
s3 = ITUtils.amazonS3();

s3Spy = Mockito.spy(s3);

Expand All @@ -130,7 +122,7 @@ public void setup() throws IOException {
configuration.setBoolean("fs.s3a.multiobjectdelete.enable", false);

ITUtils.configureS3AAsUnderlyingFileSystem(
localStackContainer, configuration, bucket1.getBucket(), temporaryFolder.getRoot().toString());
configuration, bucket1.getBucket(), temporaryFolder.getRoot().toString());

fileSystem = pathInBucket(bucket1.getBucket(), "").getFileSystem(configuration);

Expand Down
Loading