From b5895b3ac2045039853c6a127eaaa1a7ce317b22 Mon Sep 17 00:00:00 2001 From: neerajsinghal Date: Fri, 15 Sep 2023 18:22:27 +0530 Subject: [PATCH 1/4] E2E Scenarios written for above-mentioned tests. testUpsertOperationWithDedupeSourceData() testUpdateOperationWithDedupeSourceData() _ need to work on table creation hooks. --- .../source/BigQueryToBigQuery.feature | 91 +++++++++++++++++++ .../common/stepsdesign/TestSetupHooks.java | 58 ++++++++++++ 2 files changed, 149 insertions(+) diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index f3d4347b24..d3e906b999 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -262,3 +262,94 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table + + + @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST + Scenario: Test hook + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the 
Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + And Select radio button plugin property: "operation" with value: "update" + Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" + Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" + Then Enter key for plugin property: "dedupeBy" with values: "float_value" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + + + + @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST + Scenario: Test hook + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And 
Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + And Select radio button plugin property: "operation" with value: "upsert" + Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" + Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" + Then Enter key for plugin property: "dedupeBy" with values: "float_value" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + + diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java 
b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index 002f713655..ac13379bdc 100644 --- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -920,4 +920,62 @@ public static void createSourceBQUpdateTable() throws IOException, InterruptedEx PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); } + + @Before(value = "@BQ_UPDATE_SOURCE_TEST") + public static void createSourceBQSourceTable() throws IOException, InterruptedException { + + bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); + + + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + + "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); + + + + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` " + + "(string_value, int_value, float_value,boolean_value)" + + "VALUES" + "('string_1', 1, 0.1,true)," + + "('string_1', 2, 0.2,false)," + + "('string_3', 3, 0.3,false)"); + + + + } catch (NoSuchElementException e) { + // Insert query does not return any record. 
+ // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp(" bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); + } + + @Before(value = "@BQ_UPDATE_SINK_TEST") + public static void createSourceBQSinkTable() throws IOException, InterruptedException { + + bqTargetTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable); + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqTargetTable + "` " + + "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqTargetTable + "` " + + "(string_value, int_value, float_value,boolean_value)" + + "VALUES" + "('string_0', 0, 0,true)," + + "('string_1', 10, 1.1,false)"); + + + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); + BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); + } + + } From e6789b0c0a460d94dde8ca37ff6a2acbbece7928 Mon Sep 17 00:00:00 2001 From: neerajsinghal Date: Fri, 15 Sep 2023 18:38:21 +0530 Subject: [PATCH 2/4] Revert "E2E Scenarios written for above-mentioned tests." This reverts commit b5895b3ac2045039853c6a127eaaa1a7ce317b22. 
--- .../source/BigQueryToBigQuery.feature | 91 ------------------- .../common/stepsdesign/TestSetupHooks.java | 58 ------------ 2 files changed, 149 deletions(-) diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index d3e906b999..f3d4347b24 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -262,94 +262,3 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table - - - @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST - Scenario: Test hook - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Source" - When Select plugin: "BigQuery" from the plugins list as: "Source" - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "BigQuery" from the plugins list as: "Sink" - Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection - Then Navigate to the properties page of plugin: "BigQuery" - And Enter input plugin property: "referenceName" with value: "Reference" - And Replace input plugin property: "project" with value: "projectId" - And Enter input plugin property: "datasetProject" with value: "datasetprojectId" - And Replace input plugin property: "dataset" with value: "dataset" - And Enter input plugin property: "table" with value: "bqSourceTable" - Then Click on the Get Schema button - Then Validate "BigQuery" plugin properties - And Close the Plugin Properties page - Then Navigate to the properties page of plugin: "BigQuery2" - Then Replace input plugin property: "project" with value: "projectId" - Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Enter 
input plugin property: "referenceName" with value: "BQReferenceName" - Then Enter input plugin property: "dataset" with value: "dataset" - Then Enter input plugin property: "table" with value: "bqTargetTable" - And Select radio button plugin property: "operation" with value: "update" - Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" - Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" - Then Enter key for plugin property: "dedupeBy" with values: "float_value" - Then Click plugin property: "updateTableSchema" - Then Validate "BigQuery" plugin properties - Then Close the BigQuery properties - Then Save the pipeline - Then Preview and run the pipeline - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs - Then Close the preview - Then Deploy the pipeline - Then Run the Pipeline in Runtime - Then Wait till pipeline is in running state - Then Open and capture logs - Then Verify the pipeline status is "Succeeded" - - - - @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST - Scenario: Test hook - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Source" - When Select plugin: "BigQuery" from the plugins list as: "Source" - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "BigQuery" from the plugins list as: "Sink" - Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection - Then Navigate to the properties page of plugin: "BigQuery" - And Enter input plugin property: "referenceName" with value: "Reference" - And Replace input plugin property: "project" with value: "projectId" - And Enter input plugin property: "datasetProject" with value: "datasetprojectId" - And Replace input plugin property: "dataset" with value: "dataset" - And Enter input plugin property: 
"table" with value: "bqSourceTable" - Then Click on the Get Schema button - Then Validate "BigQuery" plugin properties - And Close the Plugin Properties page - Then Navigate to the properties page of plugin: "BigQuery2" - Then Replace input plugin property: "project" with value: "projectId" - Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Enter input plugin property: "referenceName" with value: "BQReferenceName" - Then Enter input plugin property: "dataset" with value: "dataset" - Then Enter input plugin property: "table" with value: "bqTargetTable" - And Select radio button plugin property: "operation" with value: "upsert" - Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" - Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" - Then Enter key for plugin property: "dedupeBy" with values: "float_value" - Then Click plugin property: "updateTableSchema" - Then Validate "BigQuery" plugin properties - Then Close the BigQuery properties - Then Save the pipeline - Then Preview and run the pipeline - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs - Then Close the preview - Then Deploy the pipeline - Then Run the Pipeline in Runtime - Then Wait till pipeline is in running state - Then Open and capture logs - Then Verify the pipeline status is "Succeeded" - - diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index ac13379bdc..002f713655 100644 --- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -920,62 +920,4 @@ public static void createSourceBQUpdateTable() throws IOException, InterruptedEx 
PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); } - - @Before(value = "@BQ_UPDATE_SOURCE_TEST") - public static void createSourceBQSourceTable() throws IOException, InterruptedException { - - bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); - PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); - BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); - - - io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + - "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); - - - - try { - io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` " + - "(string_value, int_value, float_value,boolean_value)" + - "VALUES" + "('string_1', 1, 0.1,true)," + - "('string_1', 2, 0.2,false)," + - "('string_3', 3, 0.3,false)"); - - - - } catch (NoSuchElementException e) { - // Insert query does not return any record. - // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException - } - - PluginPropertyUtils.addPluginProp(" bqSourceTable", bqSourceTable); - BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); - } - - @Before(value = "@BQ_UPDATE_SINK_TEST") - public static void createSourceBQSinkTable() throws IOException, InterruptedException { - - bqTargetTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); - PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable); - BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable); - io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." 
+ bqTargetTable + "` " + - "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); - try { - io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqTargetTable + "` " + - "(string_value, int_value, float_value,boolean_value)" + - "VALUES" + "('string_0', 0, 0,true)," + - "('string_1', 10, 1.1,false)"); - - - - } catch (NoSuchElementException e) { - // Insert query does not return any record. - // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException - } - - PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); - BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); - } - - } From e92df59cc2df0a06fa6abc496c4befd4a1d043d4 Mon Sep 17 00:00:00 2001 From: neerajsinghal Date: Sun, 17 Sep 2023 22:28:39 +0530 Subject: [PATCH 3/4] E2E Scenarios written for above-mentioned tests. testUpsertOperationWithDedupeSourceData() testUpdateOperationWithDedupeSourceData() _ need to work on table creation hooks. 
--- .../source/BigQueryToBigQuery.feature | 91 +++++++++++++++++++ .../common/stepsdesign/TestSetupHooks.java | 58 ++++++++++++ 2 files changed, 149 insertions(+) diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index f3d4347b24..d3e906b999 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -262,3 +262,94 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table + + + @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST + Scenario: Test hook + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then 
Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + And Select radio button plugin property: "operation" with value: "update" + Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" + Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" + Then Enter key for plugin property: "dedupeBy" with values: "float_value" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + + + + @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST + Scenario: Test hook + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin 
property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + And Select radio button plugin property: "operation" with value: "upsert" + Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" + Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" + Then Enter key for plugin property: "dedupeBy" with values: "float_value" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + + diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index 002f713655..ac13379bdc 100644 --- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -920,4 +920,62 @@ public static void createSourceBQUpdateTable() throws IOException, InterruptedEx 
PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); } + + @Before(value = "@BQ_UPDATE_SOURCE_TEST") + public static void createSourceBQSourceTable() throws IOException, InterruptedException { + + bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); + + + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + + "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); + + + + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` " + + "(string_value, int_value, float_value,boolean_value)" + + "VALUES" + "('string_1', 1, 0.1,true)," + + "('string_1', 2, 0.2,false)," + + "('string_3', 3, 0.3,false)"); + + + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp(" bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); + } + + @Before(value = "@BQ_UPDATE_SINK_TEST") + public static void createSourceBQSinkTable() throws IOException, InterruptedException { + + bqTargetTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable); + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." 
+ bqTargetTable + "` " + + "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqTargetTable + "` " + + "(string_value, int_value, float_value,boolean_value)" + + "VALUES" + "('string_0', 0, 0,true)," + + "('string_1', 10, 1.1,false)"); + + + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable); + BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); + } + + } From 92bd3e80624e5c3000bdc19372275a75e56db14d Mon Sep 17 00:00:00 2001 From: neerajsinghal Date: Wed, 20 Sep 2023 18:48:14 +0530 Subject: [PATCH 4/4] E2E Scenarios written for above-mentioned tests. BQ New additional scenarios added. --- .../source/BigQueryToBigQuery.feature | 164 +++++++++++++++++- .../common/stepsdesign/TestSetupHooks.java | 107 +++++++++++- 2 files changed, 263 insertions(+), 8 deletions(-) diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index d3e906b999..cb4bf2a1a5 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -161,7 +161,7 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Enter BigQuery sink property table name Then Toggle BigQuery sink property truncateTable to true Then Toggle BigQuery sink property updateTableSchema to true - Then Enter BigQuery sink property partition field "bqPartitionFieldTime" + Then Enter BigQuery sink property partition field "transaction_date" Then Validate "BigQuery" plugin properties Then Close the BigQuery properties Then Connect source as "BigQuery" and 
sink as "BigQuery" to establish connection @@ -264,8 +264,8 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table - @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST - Scenario: Test hook + @BQ_UPSERT_SOURCE_TEST @BQ_UPSERT_SINK_TEST + Scenario: Verify scenario from BigQuery to BigQuery to ensure that upsert operations are performed without updating the destination table Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "BigQuery" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "BigQuery" from the plugins list as: "Sink" @@ -287,7 +287,7 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" - And Select radio button plugin property: "operation" with value: "update" + And Select radio button plugin property: "operation" with value: "upsert" Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" Then Enter key for plugin property: "dedupeBy" with values: "float_value" @@ -310,7 +310,7 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data @BQ_UPDATE_SOURCE_TEST @BQ_UPDATE_SINK_TEST - Scenario: Test hook + Scenario: Verify from BigQuery to BigQuery to ensure that update operations are performed and that the duplicate entries have been removed in the sink.
Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "BigQuery" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "BigQuery" from the plugins list as: "Sink" @@ -332,7 +332,7 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" - And Select radio button plugin property: "operation" with value: "upsert" + And Select radio button plugin property: "operation" with value: "update" Then Enter Value for plugin property table key : "relationTableKey" with values: "string_value" Then Select dropdown plugin property: "dedupeBy" with option value: "DESC" Then Enter key for plugin property: "dedupeBy" with values: "float_value" @@ -353,3 +353,155 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Verify the pipeline status is "Succeeded" + @BQ_NULL_MODE_SOURCE_TEST @BQ_SINK_TEST + Scenario: Validate successful record transfer from BigQuery source plugin with all NULL values in one column and few NULL values in a different column.
+ Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the 
values of records transferred to BQ sink is equal to the values from source BigQuery table + + + + @BQ_TIME_STAMP_SOURCE_TEST @BQ_SINK_TEST + Scenario: Verify record insert from source BigQuery plugin with partition type Time (Date/timestamp/datetime). + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery2" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Enter input plugin property: "partitionByField" with value: "partiontion_by_field_value" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline 
preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + #Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table + + + @BQ_INSERT_SOURCE_TEST @BQ_INSERT_SECOND_SOURCE_TEST @BQ_SINK_TEST + Scenario: Verify BigQuery With Different Schema RecordName + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Transform" + When Select plugin: "Wrangler" from the plugins list as: "Transform" + When Select plugin: "Wrangler" from the plugins list as: "Transform" + Then Move plugins: "Wrangler" by xOffset 250 and yOffset 300 + Then Move plugins: "BigQuery" by xOffset 100 and yOffset 200 + Then Connect plugins: "BigQuery" and "Wrangler2" to establish connection + Then Connect plugins: "BigQuery2" and "Wrangler" to establish connection + Then Connect plugins: "Wrangler" and "Wrangler2" to establish connection + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "Wrangler2" and "BigQuery3" to establish connection + Then Navigate to the properties page of plugin: "BigQuery2" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with 
value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery2" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Wrangler" + Then Enter textarea plugin property: "directives" with value: "drop :TableName" + Then Validate "Wrangler" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Wrangler2" + Then Enter textarea plugin property: "directives" with value: "rename :EmployeeID :EmpID;" + Then Validate "Wrangler2" plugin properties + And Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery3" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + 
Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + +# Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table + + diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index ac13379bdc..92631968c1 100644 --- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -59,6 +59,7 @@ public class TestSetupHooks { public static String gcsTargetBucketName = StringUtils.EMPTY; public static String bqTargetTable = StringUtils.EMPTY; public static String bqSourceTable = StringUtils.EMPTY; +public static String bqSecondSourceTable = StringUtils.EMPTY; public static String bqSourceView = StringUtils.EMPTY; public static String pubSubTargetTopic = StringUtils.EMPTY; public static String spannerInstance = StringUtils.EMPTY; @@ -921,7 +922,7 @@ public static void createSourceBQUpdateTable() throws IOException, InterruptedEx BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully"); } - @Before(value = "@BQ_UPDATE_SOURCE_TEST") + @Before(value = "@BQ_UPSERT_SOURCE_TEST") public static void createSourceBQSourceTable() throws IOException, InterruptedException { bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); @@ -952,7 +953,7 @@ public static void createSourceBQSourceTable() throws IOException, InterruptedEx BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); } - @Before(value = "@BQ_UPDATE_SINK_TEST") + @Before(value = "@BQ_UPSERT_SINK_TEST") public static void createSourceBQSinkTable() throws IOException, InterruptedException { bqTargetTable = "E2E_TARGET_" + 
+ UUID.randomUUID().toString().replaceAll("-", "_"); @@ -978,4 +979,106 @@ public static void createSourceBQSinkTable() throws IOException, InterruptedExce } + @Before(value = "@BQ_UPDATE_SOURCE_TEST") + public static void createSourceBQTable() throws IOException, InterruptedException { + + bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); + + + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + + "(string_value STRING, int_value INT64, float_value FLOAT64,boolean_value BOOL)"); + + + + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` " + + "(string_value, int_value, float_value,boolean_value)" + + "VALUES" + "('string_1', 1, 0.1,true)," + + "('string_1', 2, 0.2,false)"); + + + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); + } + + + @Before(value = "@BQ_NULL_MODE_SOURCE_TEST") + public static void createNullSourceBQTable() throws IOException, InterruptedException { + bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + + "(Address STRING, id INT64, Firstname STRING,LastName STRING)"); + + try { + io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "."
+ bqSourceTable + "` " + + "(Address, id, Firstname, LastName)" + + "VALUES" + "('Agra', 1, 'Neeraj','')," + + "('Noida', 2, '','')"); + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); + } + + + + + + + @Before(value = "@BQ_TIME_STAMP_SOURCE_TEST") + public static void createTimeStampBQTable() throws IOException, InterruptedException { + bqSourceTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ source table name - " + bqSourceTable); + BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " + + "(Address STRING, transaction_date DATE, Firstname STRING)"); + + try { + BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` " + + "(Address, transaction_date, Firstname)" + + "VALUES" + "('Agra', '2021-02-20', 'Neeraj')," + + "('Noida', '2021-02-21',''),"+"('Gudgaon', '2021-02-22', 'singhal')"); + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " updated successfully"); + } + + @Before( order = 2 , value = "@BQ_INSERT_SECOND_SOURCE_TEST") + public static void createSecondSourceBQInsertTable() throws IOException, InterruptedException { + bqSecondSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-" , "_"); + BigQueryClient.getSoleQueryResult("create table `" + datasetName + "."
+ bqSecondSourceTable + "` " + + "(PersonID INT64, LastName STRING, TableName STRING, FirstName STRING)"); + try { + BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSecondSourceTable + "` " + + "(PersonID, LastName, TableName, FirstName)" + + "VALUES" + "(5, 'Rani', 'Test','Raja')"); + + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSecondSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSecondSourceTable + " created successfully"); + } + + + }