Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -77,13 +77,9 @@ jobs:
python-version: default
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: GCloud Docker credential helper
run: |
gcloud auth configure-docker us.gcr.io
- name: run XVR GoUsingJava Dataflow script
env:
USER: github-actions
CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}}
uses: ./.github/actions/gradle-command-self-hosted-action
with:
gradle-command: :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerGoUsingJava
Expand Down
117 changes: 70 additions & 47 deletions runners/google-cloud-dataflow-java/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -212,17 +212,17 @@ def commonLegacyExcludeCategories = [
]

// JUnit categories excluded from all Dataflow Runner V2 ValidatesRunner suites.
// Each entry names a marker interface from org.apache.beam.sdk.testing; tests
// carrying one of these categories exercise features Runner V2 does not support.
// Fix: the list previously contained every entry twice (redundant duplicates);
// the duplicates are removed here — the effective exclusion set is unchanged.
def commonRunnerV2ExcludeCategories = [
  'org.apache.beam.sdk.testing.UsesExternalService',
  'org.apache.beam.sdk.testing.UsesGaugeMetrics',
  'org.apache.beam.sdk.testing.UsesSetState',
  'org.apache.beam.sdk.testing.UsesMapState',
  'org.apache.beam.sdk.testing.UsesMultimapState',
  'org.apache.beam.sdk.testing.UsesMetricsPusher',
  'org.apache.beam.sdk.testing.UsesOrderedListState',
  'org.apache.beam.sdk.testing.UsesTestStream',
  'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime',
  'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
  // Dataflow QM currently does not support returning BoundedTrie in metric results.
  'org.apache.beam.sdk.testing.UsesBoundedTrieMetrics',
]

def createLegacyWorkerValidatesRunnerTest = { Map args ->
Expand All @@ -241,7 +241,7 @@ def createLegacyWorkerValidatesRunnerTest = { Map args ->
maxParallelForks Integer.MAX_VALUE
classpath = configurations.validatesRunner
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
files(project(project.path).sourceSets.test.output.classesDirs)
files(project(project.path).sourceSets.test.output.classesDirs)
useJUnit {
includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
commonLegacyExcludeCategories.each {
Expand Down Expand Up @@ -274,7 +274,7 @@ def createRunnerV2ValidatesRunnerTest = { Map args ->
maxParallelForks Integer.MAX_VALUE
classpath = configurations.validatesRunner
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
files(project(project.path).sourceSets.test.output.classesDirs)
files(project(project.path).sourceSets.test.output.classesDirs)
useJUnit {
includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
commonRunnerV2ExcludeCategories.each {
Expand Down Expand Up @@ -306,9 +306,9 @@ def buildAndPushDockerJavaContainer = tasks.register("buildAndPushDockerJavaCont

dependsOn ":sdks:java:container:${javaVer}:docker"
def defaultDockerImageName = containerImageName(
name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
root: "apache",
tag: project.sdk_version)
name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
root: "apache",
tag: project.sdk_version)
doLast {
exec {
commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerJavaImageName}"
Expand Down Expand Up @@ -368,14 +368,37 @@ def buildAndPushDockerPythonContainer = tasks.create("buildAndPushDockerPythonCo
def pythonVer = project.project(':sdks:python').pythonVersion
dependsOn ":sdks:python:container:py"+pythonVer.replace('.', '')+":docker"
def defaultDockerImageName = containerImageName(
name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
root: "apache",
tag: project.sdk_version)
name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
root: "apache",
tag: project.sdk_version)
// Before building/pushing the Python SDK container, make sure Docker is
// authorized against Google Container Registry. Runs at task execution time
// (doFirst), not configuration time, so the gcloud calls only happen when
// the task actually executes.
doFirst {
  // Resolve a usable gcloud config directory: prefer CLOUDSDK_CONFIG from the
  // environment, but fall back to /tmp/gcloud when it is unset or the
  // directory is not writable (gcloud needs write access to store credentials).
  def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG")
  if (cloudsdkConfig == null || !new File(cloudsdkConfig).canWrite()) {
    cloudsdkConfig = "/tmp/gcloud"
  }
  if (cloudsdkConfig == "/tmp/gcloud") {
    // Fallback path may not exist yet; create it and record the choice as a
    // JVM system property so later phases (see doLast) can pick it up —
    // System.getenv cannot be mutated from within the JVM.
    def tmpGcloudDir = new File(cloudsdkConfig)
    tmpGcloudDir.mkdirs()
    System.setProperty("CLOUDSDK_CONFIG", cloudsdkConfig)
  }
  // Register gcloud as a Docker credential helper for us.gcr.io.
  // ignoreExitValue = false (the default) makes the build fail fast if
  // authorization cannot be configured.
  exec {
    environment "CLOUDSDK_CONFIG", cloudsdkConfig
    commandLine "gcloud", "--quiet", "auth", "configure-docker", "us.gcr.io"
    ignoreExitValue = false
  }
  // Same for the regionless gcr.io registry; both hosts may be used for the
  // image push depending on the configured image name.
  exec {
    environment "CLOUDSDK_CONFIG", cloudsdkConfig
    commandLine "gcloud", "--quiet", "auth", "configure-docker", "gcr.io"
    ignoreExitValue = false
  }
}
// After the SDK container image has been built, retag it with the target
// (GCR) name and push it.
doLast {
  // Retag the locally built default image under the name expected by the
  // Dataflow tests. Both names are defined earlier in this task's setup.
  exec {
    commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerPythonImageName}"
  }
  // Recover the gcloud config dir chosen in doFirst: environment first, then
  // the system property set there for the /tmp/gcloud fallback, then the
  // fallback literal itself as a last resort.
  def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG") ?: System.getProperty("CLOUDSDK_CONFIG") ?: "/tmp/gcloud"
  // NOTE(review): `gcloud docker -- push` is a deprecated/removed wrapper in
  // newer Cloud SDK releases; since doFirst already runs
  // `gcloud auth configure-docker`, a plain `docker push` should suffice —
  // confirm against the Cloud SDK version used on CI before changing.
  exec {
    environment "CLOUDSDK_CONFIG", cloudsdkConfig
    commandLine "gcloud", "docker", "--", "push", "${dockerPythonImageName}"
  }
}
Expand Down Expand Up @@ -594,13 +617,13 @@ task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test, dependsOn: copyG
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--firestoreDb=${firestoreDb}",
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--firestoreDb=${firestoreDb}",
])

include '**/*IT.class'
Expand Down Expand Up @@ -630,14 +653,14 @@ task googleCloudPlatformLegacyWorkerKmsIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRootKms}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--dataflowKmsKey=${dataflowKmsKey}",
"--firestoreDb=${firestoreDb}",
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRootKms}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--dataflowKmsKey=${dataflowKmsKey}",
"--firestoreDb=${firestoreDb}",
])

include '**/*IT.class'
Expand Down Expand Up @@ -732,12 +755,12 @@ task coreSDKJavaLegacyWorkerIntegrationTest(type: Test) {
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"

systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--runner=TestDataflowRunner",
"--project=${gcpProject}",
"--region=${gcpRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
])

include '**/*IT.class'
Expand Down Expand Up @@ -837,17 +860,17 @@ createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',

// Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflowBom
createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
runner: 'DataflowBom',
gcpProject: gcpProject,
gcpRegion: gcpRegion,
gcsBucket: gcsBucket,
bqDataset: bqDataset,
pubsubTopic: pubsubTopic)
runner: 'DataflowBom',
gcpProject: gcpProject,
gcpRegion: gcpRegion,
gcsBucket: gcsBucket,
bqDataset: bqDataset,
pubsubTopic: pubsubTopic)

// Standalone task for testing GCS upload, use with -PfilesToStage and -PgcpTempRoot.
task GCSUpload(type: JavaExec) {
mainClass = 'org.apache.beam.runners.dataflow.util.GCSUploadMain'
classpath = sourceSets.test.runtimeClasspath
args "--stagingLocation=${dataflowUploadTemp}/staging",
"--filesToStage=${testFilesToStage}"
"--filesToStage=${testFilesToStage}"
}
Loading