From c65bf494881e1d3c0b93b2491c91729c10c0cc76 Mon Sep 17 00:00:00 2001
From: vikasrathee-cs
Date: Tue, 15 Oct 2024 23:22:21 +0530
Subject: [PATCH] Added fix for date datatype

---
 .../features/sink/OracleRunTime.feature      | 51 +++++++++++++++++++
 .../java/io.cdap.plugin/OracleClient.java    | 10 ++++
 .../common.stepsdesign/TestSetupHooks.java   | 30 +++++++++++
 .../BigQuery/CreateBQTableQueryFileDate.txt  |  1 +
 .../BigQuery/InsertBQDataQueryFileDate.txt   |  1 +
 .../plugin/oracle/OracleSourceDBRecord.java  |  2 +-
 6 files changed, 94 insertions(+), 1 deletion(-)
 create mode 100644 oracle-plugin/src/e2e-test/resources/testdata/BigQuery/CreateBQTableQueryFileDate.txt
 create mode 100644 oracle-plugin/src/e2e-test/resources/testdata/BigQuery/InsertBQDataQueryFileDate.txt

diff --git a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
index 67293700b..dd6da5572 100644
--- a/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
+++ b/oracle-plugin/src/e2e-test/features/sink/OracleRunTime.feature
@@ -167,3 +167,54 @@ Feature: Oracle - Verify data transfer from BigQuery source to Oracle sink
     Then Verify the pipeline status is "Succeeded"
     Then Validate records transferred to target table with record counts of BigQuery table
     Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table with case
+
+
+  @BQ_SOURCE_TEST_DATE @ORACLE_DATE_TABLE
+  Scenario: To verify data is getting transferred from BigQuery source to Oracle sink successfully when schema is having date and timestamp fields
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "Oracle" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "Oracle" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "bqOutputDatatypesSchemaSmallCase"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "Oracle"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
+    Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Select radio button plugin property: "connectionType" with value: "service"
+    Then Select radio button plugin property: "role" with value: "normal"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Select radio button plugin property: "connectionType" with value: "service"
+    Then Select radio button plugin property: "role" with value: "normal"
+    Then Validate "Oracle" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Click on preview data for Oracle sink
+    Then Close the preview data
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate records transferred to target table with record counts of BigQuery table
+    Then Validate the values of records transferred to target Oracle table is equal to the values from source BigQuery table with case
diff --git a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/OracleClient.java b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/OracleClient.java
index b340ef690..8f71c8ddc 100644
--- a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/OracleClient.java
+++ b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/OracleClient.java
@@ -350,4 +350,14 @@ public static void deleteTable(String schema, String table)
       }
     }
   }
+
+  public static void createTargetDateTable(String targetTable, String schema) throws SQLException,
+    ClassNotFoundException {
+    try (Connection connect = getOracleConnection(); Statement statement = connect.createStatement()) {
+      String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable +
+        "(id varchar2(100),date_col DATE,timestamp_tz_col TIMESTAMP WITH TIME ZONE,timestamp_ltz_col " +
+        "TIMESTAMP WITH LOCAL TIME ZONE,interval_ym_col INTERVAL YEAR TO MONTH,date_type DATE)";
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
 }
diff --git a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java
index c66a001b7..0ce61ba53 100644
--- a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java
+++ b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java
@@ -386,4 +386,34 @@ public static void deleteTempSourceBQTableSmallCase() throws IOException, Interr
     BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
     PluginPropertyUtils.removePluginProp("bqSourceTable");
   }
+
+  @Before(order = 1, value = "@BQ_SOURCE_TEST_DATE")
+  public static void createTempSourceBQTableWithDateColumns() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFileDate"),
+                                   PluginPropertyUtils.pluginProp("InsertBQDataQueryFileDate"));
+  }
+
+  @After(order = 1, value = "@BQ_SOURCE_TEST_DATE")
+  public static void deleteTempSourceBQTableWithDateColumns() throws IOException, InterruptedException {
+    String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
+    BigQueryClient.dropBqQuery(bqSourceTable);
+    BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
+    PluginPropertyUtils.removePluginProp("bqSourceTable");
+  }
+
+  @Before(order = 2, value = "@ORACLE_DATE_TABLE")
+  public static void createOracleTargetTestTable() throws SQLException, ClassNotFoundException {
+    OracleClient.createTargetDateTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                       PluginPropertyUtils.pluginProp("schema"));
+    BeforeActions.scenario.write("Oracle Target Table - " + PluginPropertyUtils.pluginProp("targetTable")
+                                   + " created successfully");
+  }
+
+  @After(order = 2, value = "@ORACLE_DATE_TABLE")
+  public static void dropOracleTargetTestTable() throws SQLException, ClassNotFoundException {
+    OracleClient.deleteTable(PluginPropertyUtils.pluginProp("schema"),
+                             PluginPropertyUtils.pluginProp("targetTable"));
+    BeforeActions.scenario.write("Oracle Target Table - " + PluginPropertyUtils.pluginProp("targetTable")
+                                   + " deleted successfully");
+  }
 }
diff --git a/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/CreateBQTableQueryFileDate.txt b/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/CreateBQTableQueryFileDate.txt
new file mode 100644
index 000000000..4674ab6b8
--- /dev/null
+++ b/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/CreateBQTableQueryFileDate.txt
@@ -0,0 +1 @@
+CREATE TABLE `DATASET.TABLE_NAME` (ID STRING NOT NULL,DATE_COL DATETIME, TIMESTAMP_TZ_COL TIMESTAMP, TIMESTAMP_LTZ_COL DATETIME, INTERVAL_YM_COL STRING,DATE_TYPE DATE);
\ No newline at end of file
diff --git a/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/InsertBQDataQueryFileDate.txt b/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/InsertBQDataQueryFileDate.txt
new file mode 100644
index 000000000..d3391ef23
--- /dev/null
+++ b/oracle-plugin/src/e2e-test/resources/testdata/BigQuery/InsertBQDataQueryFileDate.txt
@@ -0,0 +1 @@
+INSERT INTO `DATASET.TABLE_NAME` (id, date_col, timestamp_tz_col, timestamp_ltz_col, interval_ym_col,date_type) VALUES('2', '2024-10-11', '2024-10-11 14:30:00.123456+05:30', '2024-10-11 14:30:00.123456','2-6','2024-10-11');
\ No newline at end of file
diff --git a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceDBRecord.java b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceDBRecord.java
index 3f7c2a20a..dc2b5a7bd 100644
--- a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceDBRecord.java
+++ b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceDBRecord.java
@@ -160,7 +160,7 @@ protected void writeNonNullToDB(PreparedStatement stmt, Schema fieldSchema,
         String timestampString = Timestamp.valueOf(localDateTime).toString();
         Object timestampWithTimeZone = createOracleTimestamp(stmt.getConnection(), timestampString);
         stmt.setObject(sqlIndex, timestampWithTimeZone);
-      } else if (Schema.LogicalType.TIMESTAMP_MICROS.equals(fieldSchema.getLogicalType())) {
+      } else {
         // Deprecated: Handle the case when the Timestamp is mapped to CDAP Timestamp type
         super.writeNonNullToDB(stmt, fieldSchema, fieldName, fieldIndex);
       }