Commit 9fec4e2

Make naming consistent
1 parent 2e61e44 commit 9fec4e2

6 files changed, +21 -21 lines changed

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ public String toString() {
     return toDDL();
   }

-  protected String toSQL(NamedReference[] columns) {
+  protected String toDDL(NamedReference[] columns) {
     StringJoiner joiner = new StringJoiner(", ");

     for (NamedReference column : columns) {

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Check.java

Lines changed: 13 additions & 13 deletions
@@ -44,26 +44,26 @@
  */
 public class Check extends BaseConstraint {

-  private final String predicateSQL;
+  private final String predicateSql;
   private final Predicate predicate;

   private Check(
       String name,
-      String predicateSQL,
+      String predicateSql,
       Predicate predicate,
       boolean enforced,
       ValidationStatus validationStatus,
       boolean rely) {
     super(name, enforced, validationStatus, rely);
-    this.predicateSQL = predicateSQL;
+    this.predicateSql = predicateSql;
     this.predicate = predicate;
   }

   /**
    * Returns the SQL representation of the search condition (Spark SQL dialect).
    */
-  public String predicateSQL() {
-    return predicateSQL;
+  public String predicateSql() {
+    return predicateSql;
   }

   /**
@@ -75,7 +75,7 @@ public Predicate predicate() {

   @Override
   protected String definition() {
-    return String.format("CHECK (%s)", predicateSQL != null ? predicateSQL : predicate);
+    return String.format("CHECK (%s)", predicateSql != null ? predicateSql : predicate);
   }

   @Override
@@ -84,7 +84,7 @@ public boolean equals(Object other) {
     if (other == null || getClass() != other.getClass()) return false;
     Check that = (Check) other;
     return Objects.equals(name(), that.name()) &&
-        Objects.equals(predicateSQL, that.predicateSQL) &&
+        Objects.equals(predicateSql, that.predicateSql) &&
         Objects.equals(predicate, that.predicate) &&
         enforced() == that.enforced() &&
         Objects.equals(validationStatus(), that.validationStatus()) &&
@@ -93,12 +93,12 @@ public boolean equals(Object other) {

   @Override
   public int hashCode() {
-    return Objects.hash(name(), predicateSQL, predicate, enforced(), validationStatus(), rely());
+    return Objects.hash(name(), predicateSql, predicate, enforced(), validationStatus(), rely());
   }

   public static class Builder extends BaseConstraint.Builder<Builder, Check> {

-    private String predicateSQL;
+    private String predicateSql;
     private Predicate predicate;

     Builder(String name) {
@@ -110,8 +110,8 @@ protected Builder self() {
       return this;
     }

-    public Builder predicateSQL(String sql) {
-      this.predicateSQL = sql;
+    public Builder predicateSql(String sql) {
+      this.predicateSql = sql;
       return this;
     }

@@ -121,12 +121,12 @@ public Builder predicate(Predicate predicate) {
     }

     public Check build() {
-      if (predicateSQL == null && predicate == null) {
+      if (predicateSql == null && predicate == null) {
         throw new SparkIllegalArgumentException(
           "INTERNAL_ERROR",
           Map.of("message", "Predicate SQL and expression can't be both null in CHECK"));
       }
-      return new Check(name(), predicateSQL, predicate, enforced(), validationStatus(), rely());
+      return new Check(name(), predicateSql, predicate, enforced(), validationStatus(), rely());
     }
   }
 }
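
For call sites, the rename only touches the builder method and accessor. A minimal usage sketch in Java, mirroring the updated ConstraintSuite test below; the Constraint.check(...) factory and the expected DDL string come from that test, and the import path assumes Constraint sits in the same constraints package as the classes touched here:

import org.apache.spark.sql.connector.catalog.constraints.Check;
import org.apache.spark.sql.connector.catalog.constraints.Constraint;

// Sketch only: builds a CHECK constraint through the renamed predicateSql(...) builder method.
// Factory and expected DDL are taken from the ConstraintSuite change below, not new API.
public class CheckRenameSketch {
  public static void main(String[] args) {
    Check check = Constraint.check("con4")
        .predicateSql("a = 1")  // was predicateSQL("a = 1") before this commit
        .build();
    // Per the updated test, this prints:
    // CONSTRAINT con4 CHECK (a = 1) ENFORCED UNVALIDATED NORELY
    System.out.println(check.toDDL());
  }
}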

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/ForeignKey.java

Lines changed: 2 additions & 2 deletions
@@ -85,9 +85,9 @@ public NamedReference[] referencedColumns() {
   protected String definition() {
     return String.format(
       "FOREIGN KEY (%s) REFERENCES %s (%s)",
-      toSQL(columns),
+      toDDL(columns),
       refTable,
-      toSQL(refColumns));
+      toDDL(refColumns));
   }

   @Override

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/PrimaryKey.java

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ public NamedReference[] columns() {

   @Override
   protected String definition() {
-    return String.format("PRIMARY KEY (%s)", toSQL(columns));
+    return String.format("PRIMARY KEY (%s)", toDDL(columns));
   }

   @Override

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Unique.java

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ public NamedReference[] columns() {

   @Override
   protected String definition() {
-    return String.format("UNIQUE (%s)", toSQL(columns));
+    return String.format("UNIQUE (%s)", toDDL(columns));
   }

   @Override
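
All three definition() implementations delegate to the renamed BaseConstraint helper. A toy sketch of the resulting formatting, with plain strings standing in for NamedReference and example table/column names; the join-and-return body is assumed, since the BaseConstraint hunk above only shows the StringJoiner(", ") and the loop header:

import java.util.StringJoiner;

// Toy sketch of the column-list formatting behind the renamed toDDL(NamedReference[]) helper.
// Plain strings stand in for NamedReference; the body past the loop header is assumed.
public class DefinitionFormatSketch {

  static String toDDL(String... columns) {
    StringJoiner joiner = new StringJoiner(", ");
    for (String column : columns) {
      joiner.add(column);
    }
    return joiner.toString();
  }

  public static void main(String[] args) {
    // Format strings copied from the PrimaryKey, Unique, and ForeignKey diffs above;
    // table and column names here are illustrative only.
    System.out.println(String.format("PRIMARY KEY (%s)", toDDL("id")));
    System.out.println(String.format("UNIQUE (%s)", toDDL("a", "b")));
    System.out.println(String.format(
        "FOREIGN KEY (%s) REFERENCES %s (%s)", toDDL("customer_id"), "customers", toDDL("id")));
  }
}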

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/ConstraintSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -28,7 +28,7 @@ class ConstraintSuite extends SparkFunSuite {

   test("CHECK constraint toDDL") {
     val con1 = Constraint.check("con1")
-      .predicateSQL("id > 10")
+      .predicateSql("id > 10")
       .enforced(true)
       .validationStatus(ValidationStatus.VALID)
       .rely(true)
@@ -49,7 +49,7 @@ class ConstraintSuite extends SparkFunSuite {
     assert(con2.toDDL == "CONSTRAINT con2 CHECK (a.`b.c`.d = 1) NOT ENFORCED VALID RELY")

     val con3 = Constraint.check("con3")
-      .predicateSQL("a.b.c <=> 1")
+      .predicateSql("a.b.c <=> 1")
       .predicate(
         new Predicate(
           "<=>",
@@ -62,7 +62,7 @@ class ConstraintSuite extends SparkFunSuite {
       .build()
     assert(con3.toDDL == "CONSTRAINT con3 CHECK (a.b.c <=> 1) NOT ENFORCED INVALID NORELY")

-    val con4 = Constraint.check("con4").predicateSQL("a = 1").build()
+    val con4 = Constraint.check("con4").predicateSql("a = 1").build()
     assert(con4.toDDL == "CONSTRAINT con4 CHECK (a = 1) ENFORCED UNVALIDATED NORELY")
   }
