diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c93199a9..c2e1fcae 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -37,12 +37,12 @@ jobs: && (github.event.action != 'labeled' || github.event.label.name == 'build') ) steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.workflow_run.head_sha }} - name: Cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }} diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index de978d58..88266589 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -15,33 +15,44 @@ name: Build e2e tests on: - workflow_run: - workflows: - - Trigger build - types: - - completed + push: + branches: [ develop, release/* ] + pull_request: + branches: [ develop, release/* ] + types: [opened, synchronize, reopened, labeled] + workflow_dispatch: jobs: - build: + build-e2e-tests: runs-on: k8s-runner-e2e - - if: ${{ github.event.workflow_run.conclusion != 'skipped' }} + # We allow builds: + # 1) When triggered manually + # 2) When it's a merge into a branch + # 3) For PRs that are labeled as build and + # - It's a code change + # - A build label was just added + # A bit complex, but prevents builds when other labels are manipulated + if: > + github.event_name == 'workflow_dispatch' + || github.event_name == 'push' + || (contains(github.event.pull_request.labels.*.name, 'build') + && (github.event.action != 'labeled' || github.event.label.name == 'build') + ) steps: - - uses: haya14busa/action-workflow_run-status@967ed83efa565c257675ed70cfe5231f062ddd94 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: path: plugin - ref: ${{ github.event.workflow_run.head_sha }} + ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: Checkout e2e test repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: cdapio/cdap-e2e-tests path: e2e - 
name: Cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }} @@ -50,7 +61,7 @@ jobs: - name: Get Secrets from GCP Secret Manager id: 'secrets' - uses: 'google-github-actions/get-secretmanager-secrets@v0' + uses: 'google-github-actions/get-secretmanager-secrets@v2' with: secrets: |- SERVICE_NOW_CLIENT_ID:cdapio-github-builds/SERVICE_NOW_CLIENT_ID @@ -68,33 +79,20 @@ jobs: SERVICE_NOW_USERNAME: ${{ steps.secrets.outputs.SERVICE_NOW_USERNAME }} SERVICE_NOW_PASSWORD: ${{ steps.secrets.outputs.SERVICE_NOW_PASSWORD }} - - name: Upload report - uses: actions/upload-artifact@v3 - if: always() - with: - name: Cucumber report - path: ./plugin/target/cucumber-reports - - name: Upload debug files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: Debug files path: ./**/target/e2e-debug - name: Upload reports to GCS - uses: google-github-actions/upload-cloud-storage@v0 + uses: google-github-actions/upload-cloud-storage@v2 if: always() with: path: ./plugin/target/cucumber-reports destination: e2e-tests-cucumber-reports/${{ github.event.repository.name }}/${{ github.ref }} - - name: github-status-action - uses: Sibz/github-status-action@67af1f4042a5a790681aad83c44008ca6cfab83d + - name: Cucumber Report URL if: always() - with: - authToken: ${{ secrets.GITHUB_TOKEN }} - state: success - context: Cucumber report - sha: ${{github.event.pull_request.head.sha || github.sha}} - + run: echo "https://storage.googleapis.com/e2e-tests-cucumber-reports/${{ github.event.repository.name }}/${{ github.ref }}/cucumber-reports/advanced-reports/cucumber-html-reports/overview-features.html" diff --git a/pom.xml b/pom.xml index 6d723e5e..b60c825b 100644 --- a/pom.xml +++ b/pom.xml @@ -531,7 +531,7 @@ io.cdap.tests.e2e cdap-e2e-framework - 0.3.0-SNAPSHOT + 0.5.0-SNAPSHOT test diff --git 
a/src/e2e-test/features/servicenowmultisource/DesignTimeValidation.feature b/src/e2e-test/features/servicenowmultisource/DesignTimeValidation.feature index 39be0141..fce48dc4 100644 --- a/src/e2e-test/features/servicenowmultisource/DesignTimeValidation.feature +++ b/src/e2e-test/features/servicenowmultisource/DesignTimeValidation.feature @@ -36,7 +36,7 @@ Feature: ServiceNow Multi Source - Design time validation scenarios | INVALID_TABLE | And fill Credentials section for pipeline user And Click on the Validate button - Then Verify that the Plugin Property: "tableNames" is displaying an in-line error message: "invalid.property.tablename" + Then Verify that the Plugin is displaying an error message: "invalid.property.tablename" on the header @TS-SN-MULTI-DSGN-ERROR-03 Scenario: Verify validation message for Start date and End date in invalid format diff --git a/src/e2e-test/features/servicenowmultisource/RunTimeWithMacros.feature b/src/e2e-test/features/servicenowmultisource/RunTimeWithMacros.feature index dffc7a94..ff7ae949 100644 --- a/src/e2e-test/features/servicenowmultisource/RunTimeWithMacros.feature +++ b/src/e2e-test/features/servicenowmultisource/RunTimeWithMacros.feature @@ -106,7 +106,7 @@ Feature: ServiceNow Multi Source - Run time scenarios (macro) And Verify the pipeline status is "Failed" Then Open Pipeline logs and verify Log entries having below listed Level and Message: | Level | Message | - | ERROR | invalid.tablenames.logsmessage | + | ERROR | invalid.tablename.logsmessage | @TS-SN-RNTM-MACRO-04 @BQ_SINK Scenario: Verify pipeline failure message in logs when user provides invalid Advanced Properties with Macros diff --git a/src/e2e-test/features/servicenowsource/RunTime.feature b/src/e2e-test/features/servicenowsource/RunTime.feature index 6f2391dc..9b9a147e 100644 --- a/src/e2e-test/features/servicenowsource/RunTime.feature +++ b/src/e2e-test/features/servicenowsource/RunTime.feature @@ -19,28 +19,29 @@ Feature: ServiceNow Source - Run time 
scenarios @TS-SN-RNTM-1 @SN_SOURCE_CONFIG @SN_RECEIVING_SLIP_LINE @BQ_SINK - Scenario: Verify user should be able to preview the pipeline where ServiceNow source is configured for Table mode + Scenario: Verify user should be able to preview the pipeline where ServiceNow source is configured for Table mode with value type display When Open Datafusion Project to configure pipeline And Select plugin: "ServiceNow" from the plugins list as: "Source" And Navigate to the properties page of plugin: "ServiceNow" And configure ServiceNow source plugin for table: "RECEIVING_SLIP_LINE" in the Table mode And fill Credentials section for pipeline user + And Select dropdown plugin property: "valueType" with option value: "Display" And Enter input plugin property: "startDate" with value: "start.date" And Enter input plugin property: "endDate" with value: "end.date" Then Validate "ServiceNow" plugin properties And Capture the generated Output Schema And Close the Plugin Properties page And Select Sink plugin: "BigQueryTable" from the plugins list - And Connect source as "ServiceNow" and sink as "BigQuery" to establish connection + And Connect source as "ServiceNow" and sink as "BigQueryTable" to establish connection And Navigate to the properties page of plugin: "BigQuery" And Replace input plugin property: "project" with value: "projectId" And Enter input plugin property: "datasetProject" with value: "datasetprojectId" And Configure BigQuery sink plugin for Dataset and Table Then Validate "BigQuery" plugin properties And Close the Plugin Properties page And Preview and run the pipeline Then Verify the preview of pipeline is "success" - And Click on the Preview Data link on the Sink plugin node: "BigQueryTable" + And Click on the Preview Data link on the Sink plugin node: "BigQuery" Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of
Source plugin @TS-SN-RNTM-2 @SN_SOURCE_CONFIG @SN_RECEIVING_SLIP_LINE @BQ_SINK @@ -55,7 +56,7 @@ Feature: ServiceNow Source - Run time scenarios Then Validate "ServiceNow" plugin properties And Close the Plugin Properties page And Select Sink plugin: "BigQueryTable" from the plugins list - And Connect source as "ServiceNow" and sink as "BigQuery" to establish connection + And Connect source as "ServiceNow" and sink as "BigQueryTable" to establish connection And Navigate to the properties page of plugin: "BigQuery" And Replace input plugin property: "project" with value: "projectId" And Enter input plugin property: "datasetProject" with value: "datasetprojectId" @@ -88,7 +89,7 @@ Feature: ServiceNow Source - Run time scenarios Then Validate "ServiceNow" plugin properties And Close the Plugin Properties page And Select Sink plugin: "BigQueryTable" from the plugins list - And Connect source as "ServiceNow" and sink as "BigQuery" to establish connection + And Connect source as "ServiceNow" and sink as "BigQueryTable" to establish connection And Navigate to the properties page of plugin: "BigQuery" And Replace input plugin property: "project" with value: "projectId" And Enter input plugin property: "datasetProject" with value: "datasetprojectId" diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryCommonSteps.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryCommonSteps.java index 6216b8ee..5f8aa538 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryCommonSteps.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryCommonSteps.java @@ -36,7 +36,7 @@ public class BigQueryCommonSteps { public void configureBqSinkPlugin() { String referenceName = "Test" + RandomStringUtils.randomAlphanumeric(10); CdfBigQueryPropertiesActions.enterBigQueryReferenceName(referenceName); - CdfBigQueryPropertiesActions.enterBigQueryDataset(TestSetupHooks.bqTargetDataset); + 
CdfBigQueryPropertiesActions.enterBigQueryDataset(PluginPropertyUtils.pluginProp("dataset")); CdfBigQueryPropertiesActions.enterBigQueryTable(TestSetupHooks.bqTargetTable); } @@ -44,12 +44,13 @@ public void configureBqSinkPlugin() { public void configureBqMultiTableSinkPlugin() { String referenceName = "Test" + RandomStringUtils.randomAlphanumeric(10); CdfBigQueryPropertiesActions.enterBigQueryReferenceName(referenceName); - CdfBigQueryPropertiesActions.enterBigQueryDataset(PluginPropertyUtils.pluginProp("bq.target.dataset2")); + CdfBigQueryPropertiesActions.enterBigQueryDataset(PluginPropertyUtils.pluginProp("dataset")); } @Then("Verify count of no of records transferred to the target BigQuery Table") public void getCountOfNoOfRecordsTransferredToTargetBigQueryTable() throws IOException, InterruptedException { - int countRecords = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetDataset, TestSetupHooks.bqTargetTable); + int countRecords = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("dataset"), + TestSetupHooks.bqTargetTable); Assert.assertEquals("Number of records transferred to BigQuery should be equal to " + "records out count displayed on the Source plugin: ", countRecords, CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut()); @@ -67,7 +68,7 @@ public void configureBqSourcePlugin() throws IOException, InterruptedException { CdfBigQueryPropertiesActions.enterDatasetProjectId(datasetProjectId); CdfBigQueryPropertiesActions.enterProjectId(projectId); CdfBigQueryPropertiesActions.enterBigQueryReferenceName(referenceName); - CdfBigQueryPropertiesActions.enterBigQueryDataset(TestSetupHooks.bqSourceDataset); + CdfBigQueryPropertiesActions.enterBigQueryDataset(PluginPropertyUtils.pluginProp("dataset")); CdfBigQueryPropertiesActions.enterBigQueryTable(TestSetupHooks.bqSourceTable); } } diff --git a/src/e2e-test/java/io/cdap/plugin/tests/hooks/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/tests/hooks/TestSetupHooks.java index 
827a4f6b..f284ace8 100644 --- a/src/e2e-test/java/io/cdap/plugin/tests/hooks/TestSetupHooks.java +++ b/src/e2e-test/java/io/cdap/plugin/tests/hooks/TestSetupHooks.java @@ -81,6 +81,7 @@ public static void createRecordInReceivingSlipLineTable() String recordDetails = "{'number':'" + uniqueId + "'}"; StringEntity entity = new StringEntity(recordDetails); systemId = tableAPIClient.createRecord(TablesInTableMode.RECEIVING_SLIP_LINE.value, entity); + BeforeActions.scenario.write("New Record in Receiving Slip Line table: " + systemId + " created successfully"); } @Before(order = 2, value = "@SN_UPDATE_AGENT_ASSIST_RECOMMENDATION") @@ -234,6 +235,7 @@ public static void updateTempSourceBQTableForServiceOffering() throws IOExceptio public static void setTempTargetBQTable() { bqTargetTable = "TestSN_table" + RandomStringUtils.randomAlphanumeric(10); BeforeActions.scenario.write("BigQuery Target table name: " + bqTargetTable); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable); } @Before(order = 1, value = "@CONNECTION") diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index b2e28d3a..1a54cc09 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -2,14 +2,13 @@ validationSuccessMessage=No errors found. #Invalid value -invalid.property.tablename=Bad Request. Table: +invalid.property.tablename=ServiceNow API returned an unexpected result or the specified table may not exist. Cause: Http call to ServiceNow instance returned status code 400. Ensure specified table exists in the datasource. invalid.property.startdate=Invalid format for Start date. Correct Format: yyyy-MM-dd invalid.property.enddate=Invalid format for End date. Correct Format: yyyy-MM-dd invalid.property.credentials=Unable to connect to ServiceNow Instance. Ensure properties like Client ID, Client Secret, API Endpoint, User Name, Password are correct. 
#Logs error message -invalid.tablename.logsmessage=Spark program 'phase-1' failed with error: Errors were encountered during validation. Bad Request. Table: -invalid.credentials.logsmessage=Spark program 'phase-1' failed with error: Errors were encountered during validation. Unable to connect to ServiceNow Instance.. Please check the system logs for more details. -invalid.filters.logsmessage=Spark program 'phase-1' failed with error: Errors were encountered during validation. Invalid format for Start date. Correct Format: yyyy-MM-dd. Please check the system logs for more details. -invalid.tablenames.logsmessage=Spark program 'phase-1' failed with error: Errors were encountered during validation. Bad Request. Table: blahblah is invalid.. Please check the system logs for more details. +invalid.tablename.logsmessage=ServiceNow API returned an unexpected result or the specified table may not exist. +invalid.credentials.logsmessage=Errors were encountered during validation. Unable to connect to ServiceNow Instance.. Please check the system logs for more details. +invalid.filters.logsmessage=Errors were encountered during validation. Invalid format for Start date. Correct Format: yyyy-MM-dd. Please check the system logs for more details. invalid.testconnection.logmessage=Unable to connect to ServiceNow Instance. Ensure properties like Client ID, Client Secret, API Endpoint, User Name, Password are correct. 
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties index d9baec91..945dccf1 100644 --- a/src/e2e-test/resources/pluginParameters.properties +++ b/src/e2e-test/resources/pluginParameters.properties @@ -40,6 +40,8 @@ projectId=cdf-athena datasetprojectId=cdf-athena bq.target.dataset=SN_test_automation bq.target.dataset2=SN_Test_atm +dataset=testbq_bqmt +bqTargetTable=dummy ##ServiceNowSink INSERT=insert diff --git a/src/main/java/io/cdap/plugin/servicenow/restapi/RestAPIResponse.java b/src/main/java/io/cdap/plugin/servicenow/restapi/RestAPIResponse.java index 44a0d0c9..ceeab972 100644 --- a/src/main/java/io/cdap/plugin/servicenow/restapi/RestAPIResponse.java +++ b/src/main/java/io/cdap/plugin/servicenow/restapi/RestAPIResponse.java @@ -43,7 +43,8 @@ public class RestAPIResponse { private static final Gson GSON = new Gson(); private static final String HTTP_ERROR_MESSAGE = "Http call to ServiceNow instance returned status code %d."; private static final String REST_ERROR_MESSAGE = "Rest Api response has errors. Error message: %s."; - private static final Set SUCCESS_CODES = new HashSet<>(Collections.singletonList(HttpStatus.SC_OK)); + private static final Set SUCCESS_CODES = new HashSet<>(Arrays.asList(HttpStatus.SC_CREATED, + HttpStatus.SC_OK)); private final Map headers; private final String responseBody; @Nullable private final ServiceNowAPIException exception;