
Commit 3237070
Build succeeded
jiang95-dev committed Oct 11, 2024
1 parent cd3d61c commit 3237070
Showing 12 changed files with 125 additions and 106 deletions.
9 changes: 9 additions & 0 deletions integrations/spark-3.5/openhouse-spark-itest/build.gradle
@@ -47,19 +47,28 @@ task statementTest(type: Test) {
filter {
includeTestsMatching 'com.linkedin.openhouse.spark.statementtest.*'
}
+jvmArgs \
+'--add-exports=java.base/sun.nio.ch=ALL-UNNAMED'
}

task catalogTest(type: Test) {
filter {
includeTestsMatching 'com.linkedin.openhouse.spark.catalogtest.*'
}
+jvmArgs \
+'--add-exports=java.base/sun.nio.ch=ALL-UNNAMED'
}

test {
filter {
excludeTestsMatching 'com.linkedin.openhouse.spark.statementtest.*'
excludeTestsMatching 'com.linkedin.openhouse.spark.catalogtest.*'
}
+jvmArgs \
+'--add-opens=java.base/java.nio=ALL-UNNAMED',
+'--add-exports=java.base/sun.nio.ch=ALL-UNNAMED',
+'--add-opens=java.base/sun.util.calendar=ALL-UNNAMED',
+'--add-exports=java.base/sun.util.calendar=ALL-UNNAMED'
}

test.dependsOn statementTest
@@ -286,8 +286,8 @@ public static List<String[]> convertSchemaToFieldArray(String icebergSchema) {
.map(
x ->
x.type().toString().equals("long")
-? Arrays.asList(x.name(), "bigint", "").toArray(new String[3])
-: Arrays.asList(x.name(), x.type().toString(), "").toArray(new String[3])
+? Arrays.asList(x.name(), "bigint", null).toArray(new String[3])
+: Arrays.asList(x.name(), x.type().toString(), null).toArray(new String[3])
.collect(Collectors.toList());
}

@@ -16,7 +16,7 @@ public void testCreateTablePartitionedWithNestedColumn() throws Exception {
List<String> transformList =
Arrays.asList("days(time)", "header.time", "truncate(10, header.time)");
List<String> expectedResult =
-Arrays.asList("days(time)", "header.time", "truncate(header.time, 10)");
+Arrays.asList("days(time)", "bigint", "truncate(10, header.time)");
for (int i = 0; i < transformList.size(); i++) {
String transform = transformList.get(i);
String tableName =
@@ -17,14 +17,16 @@ public class AlterTableSchemaTest {
@SneakyThrows
@Test
public void testAlterTableUpdateSchema() {
+String mockTableLocation =
+mockTableLocationDefaultSchema(TableIdentifier.of("dbAlterS", "tb1"));
Object existingTable =
mockGetTableResponseBody(
"dbAlterS",
"tb1",
"c1",
"dbAlterS.tb1.c1",
"UUID",
-mockTableLocationDefaultSchema(TableIdentifier.of("dbAlterS", "tb1")),
+mockTableLocation,
"v1",
baseSchema,
null,
@@ -41,11 +43,12 @@ public void testAlterTableUpdateSchema() {
"c1",
"dbAlterS.tb1.c1",
"UUID",
"file:/loc2",
mockTableLocation,
"v1",
ResourceIoHelper.getSchemaJsonFromResource("evolved_dummy_healthy_schema.json"),
null,
null)));
+mockTableService.enqueue(mockResponse(200, existingTable)); // doRefresh()

Assertions.assertDoesNotThrow(
() -> spark.sql("ALTER TABLE openhouse.dbAlterS.tb1 ADD columns (favorite_number int)"));
@@ -40,6 +40,7 @@ public void testSetTableProps() {
tblProps.put(key, value);
mockTableService.enqueue(
mockResponse(200, decorateResponse(existingTable, tblProps))); // doCommit()
+mockTableService.enqueue(mockResponse(200, existingTable)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -4,6 +4,7 @@
import static com.linkedin.openhouse.spark.SparkTestBase.*;

import com.google.common.collect.ImmutableList;
+import com.linkedin.openhouse.gen.tables.client.model.GetTableResponseBody;
import com.linkedin.openhouse.spark.SparkTestBase;
import org.apache.iceberg.catalog.TableIdentifier;
import org.junit.jupiter.api.Assertions;
@@ -20,23 +21,24 @@ public void testSimpleCreateTimePartitionAndClusteredTable() {
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()

-mockTableService.enqueue(
-mockResponse(
-201,
-mockGetTableResponseBody(
-"dbCreate",
-tbName,
-"c1",
-"dbCreate.tbpartitionedclustered.c1",
-"UUID",
-mockTableLocation(
-TableIdentifier.of("dbDesc", tbName),
-convertSchemaToDDLComponent(baseSchema),
-"PARTITIONED BY (days(timestampCol), name, id, count)"),
-"v1",
-baseSchema,
-null,
-null))); // doCommit()
+GetTableResponseBody getTableResponseBody =
+mockGetTableResponseBody(
+"dbCreate",
+tbName,
+"c1",
+"dbCreate.tbpartitionedclustered.c1",
+"UUID",
+mockTableLocation(
+TableIdentifier.of("dbDesc", tbName),
+convertSchemaToDDLComponent(baseSchema),
+"PARTITIONED BY (days(timestampCol), name, id, count)"),
+"v1",
+baseSchema,
+null,
+null);
+
+mockTableService.enqueue(mockResponse(201, getTableResponseBody)); // doCommit()
+mockTableService.enqueue(mockResponse(200, getTableResponseBody)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -53,23 +55,24 @@ public void testCreateTimePartitionAndTransformClusteredTable() {
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()

-mockTableService.enqueue(
-mockResponse(
-201,
-mockGetTableResponseBody(
-"dbCreate",
-tbName,
-"c1",
-"dbCreate.tbpartitionedclustered.c1",
-"UUID",
-mockTableLocation(
-TableIdentifier.of("dbDesc", tbName),
-convertSchemaToDDLComponent(baseSchema),
-"PARTITIONED BY (days(timestampCol), truncate(100, name))"),
-"v1",
-baseSchema,
-null,
-null))); // doCommit()
+GetTableResponseBody getTableResponseBody =
+mockGetTableResponseBody(
+"dbCreate",
+tbName,
+"c1",
+"dbCreate.tbpartitionedclustered.c1",
+"UUID",
+mockTableLocation(
+TableIdentifier.of("dbDesc", tbName),
+convertSchemaToDDLComponent(baseSchema),
+"PARTITIONED BY (days(timestampCol), truncate(100, name))"),
+"v1",
+baseSchema,
+null,
+null);
+
+mockTableService.enqueue(mockResponse(201, getTableResponseBody)); // doCommit()
+mockTableService.enqueue(mockResponse(200, getTableResponseBody)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -87,23 +90,24 @@ public void testCreateTimePartitionedTableSuccessful() {
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()

-mockTableService.enqueue(
-mockResponse(
-201,
-mockGetTableResponseBody(
-"dbCreate",
-tbName,
-"c1",
-"dbCreate.tbpartitioned.c1",
-"UUID",
-mockTableLocation(
-TableIdentifier.of("dbDesc", tbName),
-convertSchemaToDDLComponent(baseSchema),
-String.format("PARTITIONED BY (%s(timestampCol))", transform)),
-"v1",
-baseSchema,
-null,
-null))); // doCommit()
+GetTableResponseBody getTableResponseBody =
+mockGetTableResponseBody(
+"dbCreate",
+tbName,
+"c1",
+"dbCreate.tbpartitioned.c1",
+"UUID",
+mockTableLocation(
+TableIdentifier.of("dbDesc", tbName),
+convertSchemaToDDLComponent(baseSchema),
+String.format("PARTITIONED BY (%s(timestampCol))", transform)),
+"v1",
+baseSchema,
+null,
+null);
+
+mockTableService.enqueue(mockResponse(201, getTableResponseBody)); // doCommit()
+mockTableService.enqueue(mockResponse(200, getTableResponseBody)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -126,23 +130,24 @@ public void testCreateStringClusteringTableSuccessful() {
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()

-mockTableService.enqueue(
-mockResponse(
-201,
-mockGetTableResponseBody(
-"dbCreate",
-tbName,
-"c1",
-"dbCreate.tbpartitioned.c1",
-"UUID",
-mockTableLocation(
-TableIdentifier.of("dbDesc", tbName),
-convertSchemaToDDLComponent(baseSchema),
-String.format("PARTITIONED BY (%s)", transform)),
-"v1",
-baseSchema,
-null,
-null))); // doCommit()
+GetTableResponseBody getTableResponseBody =
+mockGetTableResponseBody(
+"dbCreate",
+tbName,
+"c1",
+"dbCreate.tbpartitioned.c1",
+"UUID",
+mockTableLocation(
+TableIdentifier.of("dbDesc", tbName),
+convertSchemaToDDLComponent(baseSchema),
+String.format("PARTITIONED BY (%s)", transform)),
+"v1",
+baseSchema,
+null,
+null);
+
+mockTableService.enqueue(mockResponse(201, getTableResponseBody)); // doCommit()
+mockTableService.enqueue(mockResponse(200, getTableResponseBody)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -5,6 +5,7 @@
import static com.linkedin.openhouse.spark.SparkTestBase.mockTableService;
import static com.linkedin.openhouse.spark.SparkTestBase.spark;

+import com.linkedin.openhouse.gen.tables.client.model.GetTableResponseBody;
import com.linkedin.openhouse.relocated.org.springframework.web.reactive.function.client.WebClientResponseException;
import com.linkedin.openhouse.spark.SparkTestBase;
import org.apache.iceberg.catalog.TableIdentifier;
@@ -21,20 +22,20 @@ public void testCreateTable() {
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()
mockTableService.enqueue(mockResponse(404, mockGetAllTableResponseBody())); // doRefresh()

-mockTableService.enqueue(
-mockResponse(
-201,
-mockGetTableResponseBody(
-"dbCreate",
-"tb1",
-"c1",
-"dbCreate.tb1.c1",
-"UUID",
-mockTableLocationDefaultSchema(TableIdentifier.of("dbCreate", "tb1")),
-"v1",
-baseSchema,
-null,
-null))); // doCommit()
+GetTableResponseBody getTableResponseBody =
+mockGetTableResponseBody(
+"dbCreate",
+"tb1",
+"c1",
+"dbCreate.tb1.c1",
+"UUID",
+mockTableLocationDefaultSchema(TableIdentifier.of("dbCreate", "tb1")),
+"v1",
+baseSchema,
+null,
+null);
+mockTableService.enqueue(mockResponse(201, getTableResponseBody)); // doCommit()
+mockTableService.enqueue(mockResponse(200, getTableResponseBody)); // doRefresh()

String ddlWithSchema =
"CREATE TABLE openhouse.dbCreate.tb1 (" + convertSchemaToDDLComponent(baseSchema) + ")";
@@ -39,6 +39,7 @@ public void testCreateTableWithPropsSuccessful() {
tblProps.put("k", "v");
GetTableResponseBody responseWithProp = decorateResponse(mockResponse, tblProps);
mockTableService.enqueue(mockResponse(201, responseWithProp)); // doCommit()
+mockTableService.enqueue(mockResponse(200, responseWithProp)); // doRefresh()

Assertions.assertDoesNotThrow(
() ->
@@ -37,6 +37,7 @@ public void testDescribeTable() {
null)));

Dataset<Row> rows = spark.sql("DESCRIBE TABLE openhouse.dbDesc.tb1");
spark.sql("DESCRIBE TABLE openhouse.dbDesc.tb1").show(false);
validateSchema(rows, baseSchema);
}

@@ -50,11 +51,10 @@ public void testDescribeTableDoesNotExist() {
AnalysisException.class,
() -> spark.sql("DESCRIBE TABLE openhouse.dbDesc.tbNotExist").show());

-Assertions.assertEquals(
-"Table or view not found for 'DESCRIBE TABLE': openhouse.dbDesc.tbNotExist; line 1 pos 0;\n"
-+ "'DescribeRelation false\n"
-+ "+- 'UnresolvedTableOrView [openhouse, dbDesc, tbNotExist], DESCRIBE TABLE, true\n",
-ex.getMessage());
+Assertions.assertTrue(
+ex.getMessage()
+.contains(
+"[TABLE_OR_VIEW_NOT_FOUND] The table or view `openhouse`.`dbDesc`.`tbNotExist` cannot be found. Verify the spelling and correctness of the schema and catalog."));
}

@Test
@@ -42,11 +42,10 @@ public void testDropTable() {
Assertions.assertThrows(
AnalysisException.class, () -> spark.sql("DESCRIBE TABLE openhouse.dbDrop.t1").show());

-Assertions.assertEquals(
-"Table or view not found for 'DESCRIBE TABLE': openhouse.dbDrop.t1; line 1 pos 0;\n"
-+ "'DescribeRelation false\n"
-+ "+- 'UnresolvedTableOrView [openhouse, dbDrop, t1], DESCRIBE TABLE, true\n",
-ex.getMessage());
+Assertions.assertTrue(
+ex.getMessage()
+.contains(
+"[TABLE_OR_VIEW_NOT_FOUND] The table or view `openhouse`.`dbDrop`.`t1` cannot be found. Verify the spelling and correctness of the schema and catalog."));
}

@Test
@@ -58,11 +57,10 @@ public void testDropTableNotExist() {
AnalysisException ex =
Assertions.assertThrows(AnalysisException.class, () -> spark.sql(ddl).show());

-Assertions.assertEquals(
-"Table or view not found for 'DROP TABLE': openhouse.dbDrop.t1; line 1 pos 0;\n"
-+ "'DropTable false, false\n"
-+ "+- 'UnresolvedTableOrView [openhouse, dbDrop, t1], DROP TABLE, true\n",
-ex.getMessage());
+Assertions.assertTrue(
+ex.getMessage()
+.contains(
+"[TABLE_OR_VIEW_NOT_FOUND] The table or view `openhouse`.`dbDrop`.`t1` cannot be found. Verify the spelling and correctness of the schema and catalog."));
}

@Test
@@ -31,7 +31,8 @@ public void testShowTables() {
.map(row -> row.mkString("."))
.collect(Collectors.toList());

-Assertions.assertTrue(actualRows.containsAll(ImmutableList.of("dbShow.tb1", "dbShow.tb2")));
+Assertions.assertTrue(
+actualRows.containsAll(ImmutableList.of("dbShow.tb1.false", "dbShow.tb2.false")));
}

@Test
