From c07513630cbc8d7f3382e0245bca7c38fc8f67f9 Mon Sep 17 00:00:00 2001
From: AnkitCLI
Date: Thu, 5 Jun 2025 15:36:07 +0530
Subject: [PATCH 1/4] debug

---
 .github/workflows/e2e.yml                            |  2 +-
 .../features/bigqueryexecute/BQExecute.feature       |  2 +-
 .../features/bigtable/BigTableToBigTable.feature     | 10 +++++-----
 src/e2e-test/features/datastore/runtime.feature      |  2 +-
 src/e2e-test/features/gcscopy/GCSCopy.feature        |  2 +-
 src/e2e-test/features/gcscreate/GCSCreate.feature    |  4 ++--
 src/e2e-test/features/gcsdelete/GCSDelete.feature    |  2 +-
 src/e2e-test/features/gcsmove/GCSMove.feature        |  4 ++--
 src/e2e-test/features/pubsub/sink/BQToPubSub.feature |  2 +-
 .../features/spanner/source/SpannertoGCS.feature     |  4 ++--
 10 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 1b05c31eec..8c21d90461 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -40,7 +40,7 @@
         )
     strategy:
       matrix:
-        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, datastore, bigtable]
+        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, bigtable, datastore]
       fail-fast: false
     steps:
       # Pinned 1.0.0 version
diff --git a/src/e2e-test/features/bigqueryexecute/BQExecute.feature b/src/e2e-test/features/bigqueryexecute/BQExecute.feature
index d5eb52df60..1a36b2de9e 100644
--- a/src/e2e-test/features/bigqueryexecute/BQExecute.feature
+++ b/src/e2e-test/features/bigqueryexecute/BQExecute.feature
@@ -3,7 +3,7 @@ Feature: BigQueryExecute - Verify data transfer using BigQuery Execute plugin

   @BQ_SOURCE_TEST @BQ_SINK_TEST @BQ_EXECUTE_SQL @BQExecute_Required
   Scenario: Verify Store results in a BigQuery Table functionality of BQExecute plugin
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configure pipelines
     When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
     When Select plugin: "BigQuery Execute" from the plugins list as: "Conditions and Actions"
     When Navigate to the properties page of plugin: "BigQuery Execute"
diff --git a/src/e2e-test/features/bigtable/BigTableToBigTable.feature b/src/e2e-test/features/bigtable/BigTableToBigTable.feature
index 717adfaf32..128bbe7c64 100644
--- a/src/e2e-test/features/bigtable/BigTableToBigTable.feature
+++ b/src/e2e-test/features/bigtable/BigTableToBigTable.feature
@@ -11,13 +11,13 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations under
 # the License.

-@BigTable @BIGTABLE_SOURCE_TEST
+@BigTable
 Feature: BigTable source - Verification of BigTable to BigTable Successful Data Transfer
-  @BIGTABLE_SINK_TEST @bigtable_Required
+  @BIGTABLE_SOURCE_TEST @BIGTABLE_SINK_TEST @bigtable_Required
   Scenario: To verify data is getting transferred from BigTable source table to BigTable sink table
     Given Open Datafusion Project to configure pipeline
-    When Select plugin: "Bigtable" from the plugins list as: "Source"
+    When Select plugin: "Bigtable" from the plugins list as: "Source"S
     When Expand Plugin group in the LHS plugins list: "Sink"
     When Select plugin: "Bigtable" from the plugins list as: "Sink"
     Then Connect plugins: "Bigtable" and "Bigtable2" to establish connection
@@ -51,7 +51,7 @@ Feature: BigTable source - Verification of BigTable to BigTable Successful Data
     Then Validate OUT record count is equal to IN record count
     Then Validate data transferred to target bigtable table with data of source bigtable table

-  @EXISTING_BIGTABLE_SINK
+  @BIGTABLE_SOURCE_TEST @EXISTING_BIGTABLE_SINK
   Scenario: To verify data is getting transferred from BigTable source table to existing BigTable sink
     Given Open Datafusion Project to configure pipeline
     When Select plugin: "Bigtable" from the plugins list as: "Source"
@@ -88,7 +88,7 @@ Feature: BigTable source - Verification of BigTable to BigTable Successful Data
     Then Validate OUT record count is equal to IN record count
     Then Validate data transferred to existing target bigtable table with data of source bigtable table

-  @BIGTABLE_SINK_TEST
+  @BIGTABLE_SOURCE_TEST @BIGTABLE_SINK_TEST
   Scenario: To verify data is getting transferred from unvalidated BigTable source table to BigTable sink table
     Given Open Datafusion Project to configure pipeline
     When Select plugin: "Bigtable" from the plugins list as: "Source"
diff --git a/src/e2e-test/features/datastore/runtime.feature b/src/e2e-test/features/datastore/runtime.feature
index 90a3cd2b1d..43799494a6 100644
--- a/src/e2e-test/features/datastore/runtime.feature
+++ b/src/e2e-test/features/datastore/runtime.feature
@@ -17,7 +17,7 @@ Feature: DataStore - Verification of Datastore to Datastore Successful Data Tran

   @DATASTORE_SOURCE_ENTITY @datastore_Required
   Scenario: To verify data is getting transferred from Datastore to Datastore successfully using filter and custom index
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configure pipelines
     Then Select plugin: "Datastore" from the plugins list as: "Source"
     And Navigate to the properties page of plugin: "Datastore"
     Then Replace input plugin property: "project" with value: "projectId"
diff --git a/src/e2e-test/features/gcscopy/GCSCopy.feature b/src/e2e-test/features/gcscopy/GCSCopy.feature
index 6d4614fcb2..d912d8e339 100644
--- a/src/e2e-test/features/gcscopy/GCSCopy.feature
+++ b/src/e2e-test/features/gcscopy/GCSCopy.feature
@@ -17,7 +17,7 @@ Feature:GCSCopy - Verification of successful objects copy from one bucket to ano

   @CMEK @GCS_CSV_TEST @GCS_SINK_TEST @GCSCopy_Required @ITN_TEST
   Scenario:Validate successful copy object from one bucket to another new bucket along with data validation with default subdirectory and overwrite toggle button as false.
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configurex pipeline
     When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
     When Select plugin: "GCS Copy" from the plugins list as: "Conditions and Actions"
     When Navigate to the properties page of plugin: "GCS Copy"
diff --git a/src/e2e-test/features/gcscreate/GCSCreate.feature b/src/e2e-test/features/gcscreate/GCSCreate.feature
index d9c1860c56..b78900709d 100644
--- a/src/e2e-test/features/gcscreate/GCSCreate.feature
+++ b/src/e2e-test/features/gcscreate/GCSCreate.feature
@@ -3,7 +3,7 @@ Feature: GCSCreate - Verification of GCS Create plugin

   @GCS_CSV_TEST
   Scenario: Verify GCSCreate successfully creates objects in the GCS bucket
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configure pipelines
     When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
     When Select plugin: "GCS Create" from the plugins list as: "Conditions and Actions"
     When Navigate to the properties page of plugin: "GCS Create"
@@ -31,7 +31,7 @@ Feature: GCSCreate - Verification of GCS Create plugin
     When Navigate to the properties page of plugin: "GCS Create"
     Then Enter the GCS Create property projectId "projectId"
     Then Enter the GCS Create property objects to create as path "gcsCsvFile"
-    Then Select GCS Create property fail if objects exists as "true"
+    Then Select GCS Create property fail if objects exists as s"true"
     Then Override Service account details if set in environment variables
     Then Validate "GCS Create" plugin properties
     Then Close the GCS Create properties
diff --git a/src/e2e-test/features/gcsdelete/GCSDelete.feature b/src/e2e-test/features/gcsdelete/GCSDelete.feature
index 21db22f8ac..b08f50517a 100644
--- a/src/e2e-test/features/gcsdelete/GCSDelete.feature
+++ b/src/e2e-test/features/gcsdelete/GCSDelete.feature
@@ -12,7 +12,7 @@ Feature: GCS Delete - Verification of GCS Delete plugin
     Then Override Service account details if set in environment variables
     Then Validate "GCS Delete" plugin properties
     Then Close the GCS Delete properties
-    Then Save and Deploy Pipeline
+    Then Save and Deploy Pipelines
     Then Run the Pipeline in Runtime
     Then Wait till pipeline is in running state
     Then Open and capture logs
diff --git a/src/e2e-test/features/gcsmove/GCSMove.feature b/src/e2e-test/features/gcsmove/GCSMove.feature
index e79e572483..ac8c161804 100644
--- a/src/e2e-test/features/gcsmove/GCSMove.feature
+++ b/src/e2e-test/features/gcsmove/GCSMove.feature
@@ -3,11 +3,11 @@ Feature:GCSMove - Verification of successful objects move from one bucket to ano

   @CMEK @GCS_CSV_TEST @GCS_SINK_TEST
   Scenario:Validate successful move object from one bucket to another new bucket
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configure pipelinex
     When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
     When Select plugin: "GCS Move" from the plugins list as: "Conditions and Actions"
     When Navigate to the properties page of plugin: "GCS Move"
-    Then Enter GCSMove property projectId "projectId"
+    Then Enter GCSMove property projectId "projectId"x
     Then Enter GCSMove property source path "gcsCsvFile"
     Then Enter GCSMove property destination path
     Then Override Service account details if set in environment variables
diff --git a/src/e2e-test/features/pubsub/sink/BQToPubSub.feature b/src/e2e-test/features/pubsub/sink/BQToPubSub.feature
index 8a1b5aeabd..d5738958f7 100644
--- a/src/e2e-test/features/pubsub/sink/BQToPubSub.feature
+++ b/src/e2e-test/features/pubsub/sink/BQToPubSub.feature
@@ -3,7 +3,7 @@ Feature: PubSub-Sink - Verification of BigQuery to PubSub successful data transf

   @CMEK @BQ_SOURCE_TEST
   Scenario: To verify data is getting transferred from BigQuery to PubSub successfully
-    Given Open Datafusion Project to configure pipeline
+    Given Open Datafusion Project to configure pipelines
     When Source is BigQuery
     When Sink is PubSub
     Then Connect source as "BigQuery" and sink as "GooglePublisher" to establish connection
diff --git a/src/e2e-test/features/spanner/source/SpannertoGCS.feature b/src/e2e-test/features/spanner/source/SpannertoGCS.feature
index 5c4461b294..e2291157aa 100644
--- a/src/e2e-test/features/spanner/source/SpannertoGCS.feature
+++ b/src/e2e-test/features/spanner/source/SpannertoGCS.feature
@@ -2,8 +2,8 @@
 Feature: Spanner Source - Verification of Spanner to GCS successful data transfer

   @GCS_SINK_TEST @Spanner_Source_Required
-  Scenario: Verify data is getting transferred from Spanner to GCS successfully
-    Given Open Datafusion Project to configure pipeline
+  Scenario: Verify data is getting transferred from Spanner to GCS successfullys
+    Given Open Datafusion Project to configure pipelines
     When Source is Spanner
     When Sink is GCS
     Then Connect source as "Spanner" and sink as "GCS" to establish connection

From d052b9b960bb7b4303de8220a03cf2107ff820d4 Mon Sep 17 00:00:00 2001
From: psainics
Date: Tue, 10 Jun 2025 12:04:55 +0530
Subject: [PATCH 2/4] Test out adding a verify goal

---
 pom.xml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pom.xml b/pom.xml
index 2d0450cae0..c365c69dbc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1252,6 +1252,7 @@
               <goal>integration-test</goal>
+              <goal>verify</goal>

From 5edd671189eb4a39726af083d2e1c3144290f470 Mon Sep 17 00:00:00 2001
From: AnkitCLI
Date: Tue, 10 Jun 2025 12:55:38 +0530
Subject: [PATCH 3/4] debugging

---
 pom.xml                                                   | 1 +
 src/e2e-test/features/bigqueryexecute/BQExecute.feature   | 4 ++--
 src/e2e-test/features/gcscreate/GCSCreate.feature         | 2 +-
 src/e2e-test/features/gcsdelete/GCSDelete.feature         | 4 ++--
 src/e2e-test/features/pubsub/sink/BQToPubSub.feature      | 4 ++--
 src/e2e-test/features/spanner/source/SpannertoGCS.feature | 4 ++--
 6 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/pom.xml b/pom.xml
index c365c69dbc..8be2e66c73 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1247,6 +1247,7 @@
             ${SERVICE_ACCOUNT_JSON}
+            false
diff --git a/src/e2e-test/features/bigqueryexecute/BQExecute.feature b/src/e2e-test/features/bigqueryexecute/BQExecute.feature
index 1a36b2de9e..d9e632c75c 100644
--- a/src/e2e-test/features/bigqueryexecute/BQExecute.feature
+++ b/src/e2e-test/features/bigqueryexecute/BQExecute.feature
@@ -3,10 +3,10 @@ Feature: BigQueryExecute - Verify data transfer using BigQuery Execute plugin

   @BQ_SOURCE_TEST @BQ_SINK_TEST @BQ_EXECUTE_SQL @BQExecute_Required
   Scenario: Verify Store results in a BigQuery Table functionality of BQExecute plugin
-    Given Open Datafusion Project to configure pipelines
+    Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Conditions and Actions"
     When Select plugin: "BigQuery Execute" from the plugins list as: "Conditions and Actions"
-    When Navigate to the properties page of plugin: "BigQuery Execute"
+    When Navigate to the properties page of plugin: "BigQuerhgfhgfghhg"
     Then Replace input plugin property: "projectId" with value: "projectId"
     Then Enter textarea plugin property: "sql" with value: "bqExecuteQuery"
     Then Click plugin property: "storeResultsInBigQueryTable"
"storeResultsInBigQueryTable" diff --git a/src/e2e-test/features/gcscreate/GCSCreate.feature b/src/e2e-test/features/gcscreate/GCSCreate.feature index b78900709d..11667f2685 100644 --- a/src/e2e-test/features/gcscreate/GCSCreate.feature +++ b/src/e2e-test/features/gcscreate/GCSCreate.feature @@ -31,7 +31,7 @@ Feature: GCSCreate - Verification of GCS Create plugin When Navigate to the properties page of plugin: "GCS Create" Then Enter the GCS Create property projectId "projectId" Then Enter the GCS Create property objects to create as path "gcsCsvFile" - Then Select GCS Create property fail if objects exists as s"true" + Then Select GCS Create property fail if objects exists as "truhjhjjhjhe" Then Override Service account details if set in environment variables Then Validate "GCS Create" plugin properties Then Close the GCS Create properties diff --git a/src/e2e-test/features/gcsdelete/GCSDelete.feature b/src/e2e-test/features/gcsdelete/GCSDelete.feature index b08f50517a..737892c7fc 100644 --- a/src/e2e-test/features/gcsdelete/GCSDelete.feature +++ b/src/e2e-test/features/gcsdelete/GCSDelete.feature @@ -6,13 +6,13 @@ Feature: GCS Delete - Verification of GCS Delete plugin Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Conditions and Actions" When Select plugin: "GCS Delete" from the plugins list as: "Conditions and Actions" - When Navigate to the properties page of plugin: "GCS Delete" + When Navigate to the properties page of plugin: "GCS Dkjnkjjjelete" Then Enter the GCS Delete property projectId "projectId" Then Enter the GCS Delete property objects to delete as bucketName Then Override Service account details if set in environment variables Then Validate "GCS Delete" plugin properties Then Close the GCS Delete properties - Then Save and Deploy Pipelines + Then Save and Deploy Pipeline Then Run the Pipeline in Runtime Then Wait till pipeline is in running state Then Open and capture logs diff --git a/src/e2e-test/features/pubsub/sink/BQToPubSub.feature b/src/e2e-test/features/pubsub/sink/BQToPubSub.feature index d5738958f7..fc457b600e 100644 --- a/src/e2e-test/features/pubsub/sink/BQToPubSub.feature +++ b/src/e2e-test/features/pubsub/sink/BQToPubSub.feature @@ -3,10 +3,10 @@ Feature: PubSub-Sink - Verification of BigQuery to PubSub successful data transf @CMEK @BQ_SOURCE_TEST Scenario: To verify data is getting transferred from BigQuery to PubSub successfully - Given Open Datafusion Project to configure pipelines + Given Open Datafusion Project to configure pipeline When Source is BigQuery When Sink is PubSub - Then Connect source as "BigQuery" and sink as "GooglePublisher" to establish connection + Then Connect source as "nbjhbj" and sink as "GooglePublisher" to establish connection Then Open BigQuery source properties Then Override Service account details if set in environment variables Then Enter the BigQuery source mandatory properties diff --git a/src/e2e-test/features/spanner/source/SpannertoGCS.feature b/src/e2e-test/features/spanner/source/SpannertoGCS.feature index e2291157aa..e003ac27b2 100644 --- a/src/e2e-test/features/spanner/source/SpannertoGCS.feature +++ b/src/e2e-test/features/spanner/source/SpannertoGCS.feature @@ -3,10 +3,10 @@ Feature: Spanner Source - Verification of Spanner to GCS successful data transfe @GCS_SINK_TEST @Spanner_Source_Required Scenario: Verify data is getting transferred from Spanner to GCS successfullys - Given Open Datafusion Project to configure pipelines + Given Open Datafusion Project to 
     When Source is Spanner
     When Sink is GCS
-    Then Connect source as "Spanner" and sink as "GCS" to establish connection
+    Then Connect source as "Spanjbhjbner" and sink as "GCS" to establish connection
     Then Open Spanner source properties
     Then Enter Spanner property reference name
     Then Enter Spanner property projectId "projectId"

From 1933137a19e54739849c31de70e2798c04b69fb2 Mon Sep 17 00:00:00 2001
From: AnkitCLI
Date: Tue, 10 Jun 2025 14:36:33 +0530
Subject: [PATCH 4/4] adding strict tag

---
 .../java/io/cdap/plugin/bigtable/runners/TestRunner.java | 3 ++-
 .../java/io/cdap/plugin/datastore/runner/TestRunner.java | 3 ++-
 .../java/io/cdap/plugin/gcscopy/runners/TestRunner.java  | 3 ++-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/e2e-test/java/io/cdap/plugin/bigtable/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/bigtable/runners/TestRunner.java
index d135cc7e54..0fd2c23daf 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigtable/runners/TestRunner.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigtable/runners/TestRunner.java
@@ -31,7 +31,8 @@
   monochrome = true,
   plugin = {"pretty", "html:target/cucumber-html-report/bigtable",
     "json:target/cucumber-reports/cucumber-bigtable.json",
-    "junit:target/cucumber-reports/cucumber-bigtable.xml"}
+    "junit:target/cucumber-reports/cucumber-bigtable.xml"},
+  strict = true // Fail on undefined steps
 )
 public class TestRunner {
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
index 1057d0c1d7..e1fc71a937 100644
--- a/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
+++ b/src/e2e-test/java/io/cdap/plugin/datastore/runner/TestRunner.java
@@ -31,7 +31,8 @@
   monochrome = true,
   plugin = {"pretty", "html:target/cucumber-html-report/datastore",
     "json:target/cucumber-reports/cucumber-datastore.json",
-    "junit:target/cucumber-reports/cucumber-datastore.xml"}
+    "junit:target/cucumber-reports/cucumber-datastore.xml"},
+  strict = true // Fail on undefined steps
 )
 public class TestRunner {
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java
index 91ae566df9..e9b24cc11d 100644
--- a/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java
+++ b/src/e2e-test/java/io/cdap/plugin/gcscopy/runners/TestRunner.java
@@ -31,7 +31,8 @@
   monochrome = true,
   plugin = {"pretty", "html:target/cucumber-html-report/gcscopy-action",
     "json:target/cucumber-reports/cucumber-gcscopy-action.json",
-    "junit:target/cucumber-reports/cucumber-gcscopy-action.xml"}
+    "junit:target/cucumber-reports/cucumber-gcscopy-action.xml"},
+  strict = true // Fail on undefined steps
 )
 public class TestRunner {
 }