Commit
added cleanup code for GCS resources
AnkitCLI committed Oct 22, 2024
1 parent a1a4049 commit 39cabe7
Showing 2 changed files with 19 additions and 7 deletions.
@@ -375,7 +375,7 @@ Feature: BigQuery sink - Verification of BigQuery to BigQuery successful data transfer
Then Verify the pipeline status is "Succeeded"
Then Validate the data transferred from BigQuery to BigQuery with actual And expected file for: "bqUpsertDedupeFile"

-@BQ_RECORD_SOURCE_TEST @BQ_SECOND_RECORD_SOURCE_TEST @BQ_SINK_TEST
+@BQ_PRIMARY_RECORD_SOURCE_TEST @BQ_SECONDARY_RECORD_SOURCE_TEST @BQ_SINK_TEST
Scenario: Validate successful record transfer from two BigQuery source plugins with different schema record names, with one extra column in BigQuery source plugin 1, and
using the Wrangler transformation plugin to remove the extra column and transfer the data to a BigQuery sink plugin containing all the columns from both source plugins.
Given Open Datafusion Project to configure pipeline
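The tag line above carries three tags, and Cucumber runs every tag-filtered hook whose expression matches at least one of them. A minimal sketch of that binding, assuming the cucumber-java annotations used in the hooks file below (class and method names here are illustrative only, not the repo's):

import io.cucumber.java.After;
import io.cucumber.java.Before;

public class IllustrativeHooks {
  // Runs before any scenario tagged @BQ_PRIMARY_RECORD_SOURCE_TEST.
  @Before(order = 1, value = "@BQ_PRIMARY_RECORD_SOURCE_TEST")
  public static void createPrimarySourceTable() { /* create the source table */ }

  // "or" in a tag expression means the hook fires if any listed tag is present.
  @After(order = 1, value = "@BQ_SINK_TEST or @BQ_SECONDARY_RECORD_SOURCE_TEST")
  public static void cleanUp() { /* drop the created resources */ }
}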
@@ -202,7 +202,8 @@ public static void createBucketWithAvroTestFile() throws IOException, URISyntaxException
@After(order = 1, value = "@GCS_CSV_TEST or @GCS_TSV_TEST or @GCS_BLOB_TEST " +
"or @GCS_DELIMITED_TEST or @GCS_TEXT_TEST or @GCS_OUTPUT_FIELD_TEST or @GCS_DATATYPE_1_TEST or " +
"@GCS_DATATYPE_2_TEST or @GCS_READ_RECURSIVE_TEST or @GCS_DELETE_WILDCARD_TEST or @GCS_CSV_RANGE_TEST or" +
" @GCS_PARQUET_TEST or @GCS_AVRO_TEST or @GCS_DATATYPE_TEST or @GCS_AVRO_FILE")
" @GCS_PARQUET_TEST or @GCS_AVRO_TEST or @GCS_DATATYPE_TEST or @GCS_AVRO_FILE or @GCS_CSV or " +
"GCS_MULTIPLE_FILES_TEST or GCS_MULTIPLE_FILES_REGEX_TEST")
public static void deleteSourceBucketWithFile() {
deleteGCSBucket(gcsSourceBucketName);
PluginPropertyUtils.removePluginProp("gcsSourceBucketName");
@@ -235,7 +236,7 @@ public static void createTargetGCSBucketWithCSVFile() throws IOException, URISyntaxException
BeforeActions.scenario.write("GCS target bucket name - " + gcsTargetBucketName);
}

@After(order = 1, value = "@GCS_SINK_TEST or @GCS_SINK_EXISTING_BUCKET_TEST")
@After(order = 1, value = "@GCS_SINK_TEST or @GCS_SINK_EXISTING_BUCKET_TEST or @GCS_SINK_MULTI_PART_UPLOAD")
public static void deleteTargetBucketWithFile() {
deleteGCSBucket(gcsTargetBucketName);
PluginPropertyUtils.removePluginProp("gcsTargetBucketName");
@@ -250,7 +251,8 @@ public static void setTempTargetBQTableName() {
BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable);
}

@After(order = 1, value = "@BQ_SINK_TEST")
@After(order = 1, value = "@BQ_SINK_TEST or @BQ_UPSERT_SINK_TEST or @BQ_UPDATE_SINK_DEDUPE_TEST or " +
"@BQ_EXISTING_SINK_TEST or @BQ_UPSERT_DEDUPE_SINK_TEST or @BQ_INSERT_SINK_TEST")
public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
try {
BigQueryClient.dropBqQuery(bqTargetTable);
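BigQueryClient.dropBqQuery comes from the shared cdap-e2e-tests utilities and is not part of this diff. As an assumed sketch of the operation it performs, a DROP TABLE can be issued through the google-cloud-bigquery client like this (dataset wiring is illustrative):

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;

private static void dropTable(String dataset, String table) throws InterruptedException {
  BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();
  // Backticks keep the statement valid for the generated E2E_* table names.
  String ddl = "DROP TABLE `" + dataset + "." + table + "`";
  bigQuery.query(QueryJobConfiguration.newBuilder(ddl).build());
}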
@@ -294,7 +296,9 @@ public static void createTempSourceBQTable() throws IOException, InterruptedException

@After(order = 1, value = "@BQ_SOURCE_TEST or @BQ_PARTITIONED_SOURCE_TEST or @BQ_SOURCE_DATATYPE_TEST or " +
"@BQ_INSERT_SOURCE_TEST or @BQ_UPDATE_SINK_TEST or @BQ_EXISTING_SOURCE_TEST or @BQ_EXISTING_SINK_TEST or " +
"@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST")
"@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST or @BQ_UPSERT_SOURCE_TEST or " +
"@BQ_NULL_MODE_SOURCE_TEST or @BQ_UPDATE_SOURCE_DEDUPE_TEST or @BQ_INSERT_INT_SOURCE_TEST or " +
"@BQ_TIME_SOURCE_TEST or @BQ_UPSERT_DEDUPE_SOURCE_TEST or @BQ_PRIMARY_RECORD_SOURCE_TEST")
public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
BigQueryClient.dropBqQuery(bqSourceTable);
PluginPropertyUtils.removePluginProp("bqSourceTable");
@@ -1251,7 +1255,7 @@ public static void createSinkBQDeupeUpsertTable() throws IOException, InterruptedException
BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " created successfully");
}

@Before(order = 1, value = "@BQ_RECORD_SOURCE_TEST")
@Before(order = 1, value = "@BQ_PRIMARY_RECORD_SOURCE_TEST")
public static void createSourceBQRecordTable() throws IOException, InterruptedException {
bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` " +
@@ -1270,7 +1274,7 @@ public static void createSourceBQRecordTable() throws IOException, InterruptedException
BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
}

@Before(order = 1, value = "@BQ_SECOND_RECORD_SOURCE_TEST")
@Before(order = 1, value = "@BQ_SECONDARY_RECORD_SOURCE_TEST")
public static void createSourceBQSecondRecordTable() throws IOException, InterruptedException {
bqSourceTable2 = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
io.cdap.e2e.utils.BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable2 + "` " +
@@ -1636,4 +1640,12 @@ public static void deleteTargetBqmtTable() throws IOException, InterruptedException
}
}
}

@After(order = 1, value = "@BQ_SECONDARY_RECORD_SOURCE_TEST")
public static void deleteTempSource2BQTable() throws IOException, InterruptedException {
BigQueryClient.dropBqQuery(bqSourceTable2);
PluginPropertyUtils.removePluginProp("bqSourceTable2");
BeforeActions.scenario.write("BQ source Table2 " + bqSourceTable2 + " deleted successfully");
bqSourceTable2 = StringUtils.EMPTY;
}
}
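The new deleteTempSource2BQTable drops the second source table unconditionally. If a scenario can fail before its @Before hook creates the table, the drop can be guarded the way deleteTempTargetBQTable guards its own drop with a try block; one possible variant, with the exact exception handling assumed rather than taken from this repo:

@After(order = 1, value = "@BQ_SECONDARY_RECORD_SOURCE_TEST")
public static void deleteTempSource2BQTable() throws IOException, InterruptedException {
  try {
    BigQueryClient.dropBqQuery(bqSourceTable2);
    BeforeActions.scenario.write("BQ Source Table2 " + bqSourceTable2 + " deleted successfully");
  } catch (com.google.cloud.bigquery.BigQueryException e) {
    // Tolerate a missing table; anything else should still fail the cleanup.
    if (e.getMessage() == null || !e.getMessage().contains("Not found")) {
      throw e;
    }
  } finally {
    PluginPropertyUtils.removePluginProp("bqSourceTable2");
    bqSourceTable2 = StringUtils.EMPTY;
  }
}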
