Skip to content

Commit

Permalink
Update self serve replication SQL to accept daily granularity
Browse files Browse the repository at this point in the history
  • Loading branch information
chenselena committed Oct 21, 2024
1 parent d02b2ed commit decb13d
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -43,20 +43,20 @@ public void setupSpark() {

@Test
public void testSimpleSetReplicationPolicy() {
String replicationConfigJson = "[{\"destination\":\"a\", \"interval\":\"24H\"}]";
String replicationConfigJson = "[{\"destination\":\"a\", \"interval\":\"12H\"}]";
Dataset<Row> ds =
spark.sql(
"ALTER TABLE openhouse.db.table SET POLICY (REPLICATION = "
+ "({destination:'a', interval:24H}))");
+ "({destination:'a', interval:12H}))");
assert isPlanValid(ds, replicationConfigJson);

// Test support with multiple clusters
replicationConfigJson =
"[{\"destination\":\"a\", \"interval\":\"12H\"}, {\"destination\":\"aa\", \"interval\":\"12H\"}]";
"[{\"destination\":\"a\", \"interval\":\"12H\"}, {\"destination\":\"aa\", \"interval\":\"2D\"}]";
ds =
spark.sql(
"ALTER TABLE openhouse.db.table SET POLICY (REPLICATION = "
+ "({destination:'a', interval:12h}, {destination:'aa', interval:12H}))");
+ "({destination:'a', interval:12h}, {destination:'aa', interval:2d}))");
assert isPlanValid(ds, replicationConfigJson);
}

Expand Down Expand Up @@ -187,7 +187,7 @@ public void testReplicationPolicyWithoutProperSyntax() {
"ALTER TABLE openhouse.db.table SET POLICY (REPLICAT = ({destination: 'aa', interval: '12h'}))")
.show());

// Interval input does not follow 'h/H' or 'd/D' format
Assertions.assertThrows(
OpenhouseParseException.class,
() ->
Expand All @@ -196,22 +196,6 @@ public void testReplicationPolicyWithoutProperSyntax() {
"ALTER TABLE openhouse.db.table SET POLICY (REPLICATION = ({destination: 'aa', interval: '12'}))")
.show());

Assertions.assertThrows(
OpenhouseParseException.class,
() ->
spark
.sql(
"ALTER TABLE openhouse.db.table SET POLICY (REPLICATION = ({destination: 'aa', interval: '1D'}))")
.show());

Assertions.assertThrows(
OpenhouseParseException.class,
() ->
spark
.sql(
"ALTER TABLE openhouse.db.table SET POLICY (REPLICATION = ({destination: 'aa', interval: '12d'}))")
.show());

// Missing cluster and interval values
Assertions.assertThrows(
OpenhouseParseException.class,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ replicationPolicyClusterClause

// Replication interval clause: an INTERVAL key followed by either an
// hour-granularity token (e.g. 12H) or a day-granularity token (e.g. 2D).
// NOTE(review): RETENTION_HOUR/RETENTION_DAY lexer rules are defined elsewhere
// in this grammar — presumably digits suffixed with h/H or d/D; confirm there.
replicationPolicyIntervalClause
: INTERVAL ':' RETENTION_HOUR
| INTERVAL ':' RETENTION_DAY
;

columnRetentionPolicyPatternClause
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,9 @@ class OpenhouseSqlExtensionsAstBuilder (delegate: ParserInterface) extends Openh
}

/**
 * Extracts the replication interval from the parsed clause and normalizes it
 * to upper case (e.g. "12h" -> "12H", "2d" -> "2D").
 *
 * The grammar allows exactly one of RETENTION_HOUR or RETENTION_DAY per clause,
 * so the absent alternative's accessor returns null — hence the null guard
 * before dereferencing RETENTION_HOUR.
 */
override def visitReplicationPolicyIntervalClause(ctx: ReplicationPolicyIntervalClauseContext): (String) = {
  if (ctx.RETENTION_HOUR() != null)
    ctx.RETENTION_HOUR().getText.toUpperCase()
  else ctx.RETENTION_DAY().getText.toUpperCase()
}

override def visitColumnRetentionPolicy(ctx: ColumnRetentionPolicyContext): (String, String) = {
Expand Down

0 comments on commit decb13d

Please sign in to comment.