Commit 52e61de
[SPARK][VARIANT] Add minimal support for variant type in delta-spark (#2923)
#### Which Delta project/connector is this regarding?

- [x] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

## Description

Adds the variant table feature to minimally implement the variant type as described in the RFC in #2867. Also disables using variant columns as partition columns.

## How was this patch tested?

Added some UTs; more UTs will be merged in follow-up PRs. Tested against both Spark 3.5 and the Spark 4.0 snapshot with:

```
build/sbt -DsparkVersion=latest spark/'testOnly org.apache.spark.sql.delta.DeltaVariantSuite'
build/sbt -DsparkVersion=master spark/'testOnly org.apache.spark.sql.delta.DeltaVariantSuite'
```

## Does this PR introduce _any_ user-facing changes?

No.
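For context, here is a minimal end-to-end sketch of the new behavior, mirroring the `DeltaVariantSuite` tests added in this commit. It assumes a Spark 4.0 build with the Delta extension and catalog configured; the `VariantQuickstart` object and the local-mode configuration are illustrative only, not part of this change:

```scala
import org.apache.spark.sql.SparkSession

object VariantQuickstart {
  def main(args: Array[String]): Unit = {
    // Any Delta-enabled session works; local mode is used here only for illustration.
    val spark = SparkSession.builder()
      .appName("delta-variant-quickstart")
      .master("local[*]")
      .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
      .config("spark.sql.catalog.spark_catalog",
        "org.apache.spark.sql.delta.catalog.DeltaCatalog")
      .getOrCreate()

    // Creating a table with a VARIANT column automatically enables the
    // variantType-dev reader-writer table feature.
    spark.sql("CREATE TABLE tbl(s STRING, v VARIANT) USING DELTA")
    spark.sql("INSERT INTO tbl SELECT 'foo', parse_json(cast(id + 99 AS STRING)) FROM range(1)")
    spark.sql("SELECT v::int FROM tbl").show() // prints 99

    // Variant columns cannot be partition columns; the following would fail with
    // INVALID_PARTITION_COLUMN_DATA_TYPE:
    // spark.sql("CREATE TABLE bad(s STRING, v VARIANT) USING DELTA PARTITIONED BY (v)")

    spark.stop()
  }
}
```

Creating the table with a `VARIANT` column adds the feature to the protocol automatically, while partitioning by such a column is rejected, as exercised in the tests below.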
1 parent e3b58d2 commit 52e61de

6 files changed: +199 −2 lines changed
New file: VariantShims (Spark 3.5 shim)

Lines changed: 26 additions & 0 deletions

```scala
/*
 * Copyright (2024) The Delta Lake Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.types

object VariantShims {

  /**
   * Spark's variant type is implemented for Spark 4.0 and is not implemented in Spark 3.5. Thus,
   * any Spark 3.5 DataType cannot be a variant type.
   */
  def isVariantType(dt: DataType): Boolean = false
}
```
New file: VariantShims (Spark master / 4.0 shim)

Lines changed: 23 additions & 0 deletions

```scala
/*
 * Copyright (2021) The Delta Lake Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.types

object VariantShims {

  /** Spark's variant type is only implemented in Spark 4.0 and above. */
  def isVariantType(dt: DataType): Boolean = dt.isInstanceOf[VariantType]
}
```
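Both shims expose the same `org.apache.spark.sql.types.VariantShims` object, so version-shared Delta code can test for variant columns without referencing `VariantType` directly. A minimal sketch of such a call site; the `VariantColumnLookup` helper is illustrative and not part of this commit:

```scala
import org.apache.spark.sql.types.{StructType, VariantShims}

object VariantColumnLookup {
  /** Names of top-level variant columns; always empty when compiled against Spark 3.5. */
  def variantColumnNames(schema: StructType): Seq[String] =
    schema.fields.filter(f => VariantShims.isVariantType(f.dataType)).map(_.name).toSeq
}
```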

spark/src/main/scala/org/apache/spark/sql/delta/TableFeature.scala

Lines changed: 10 additions & 1 deletion
```diff
@@ -339,7 +339,8 @@ object TableFeature {
       VacuumProtocolCheckTableFeature,
       V2CheckpointTableFeature,
       RowTrackingFeature,
-      InCommitTimestampTableFeature)
+      InCommitTimestampTableFeature,
+      VariantTypeTableFeature)
     if (DeltaUtils.isTesting) {
       features ++= Set(
         TestLegacyWriterFeature,
@@ -502,6 +503,14 @@ object TimestampNTZTableFeature extends ReaderWriterFeature(name = "timestampNtz
   }
 }
 
+object VariantTypeTableFeature extends ReaderWriterFeature(name = "variantType-dev")
+    with FeatureAutomaticallyEnabledByMetadata {
+  override def metadataRequiresFeatureToBeEnabled(
+      metadata: Metadata, spark: SparkSession): Boolean = {
+    SchemaUtils.checkForVariantTypeColumnsRecursively(metadata.schema)
+  }
+}
+
 object DeletionVectorsTableFeature
   extends ReaderWriterFeature(name = "deletionVectors")
   with FeatureAutomaticallyEnabledByMetadata {
```
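Because `VariantTypeTableFeature` is `FeatureAutomaticallyEnabledByMetadata`, a newly created table whose schema contains a variant column picks the feature up automatically, but adding a variant column to an existing table first requires supporting the feature explicitly, as exercised in `DeltaVariantSuite` below. A hedged sketch of that sequence; the helper object is illustrative, not part of this commit:

```scala
import org.apache.spark.sql.SparkSession

object EnableVariantOnExistingTable {
  // Without the TBLPROPERTIES step, "ALTER TABLE ... ADD COLUMN v VARIANT" fails with
  // DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT on a table that was created without variant.
  def addVariantColumn(spark: SparkSession, table: String, column: String): Unit = {
    spark.sql(
      s"ALTER TABLE $table SET TBLPROPERTIES('delta.feature.variantType-dev' = 'supported')")
    spark.sql(s"ALTER TABLE $table ADD COLUMN $column VARIANT")
  }
}
```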

spark/src/main/scala/org/apache/spark/sql/delta/schema/SchemaUtils.scala

Lines changed: 9 additions & 0 deletions
```diff
@@ -1271,6 +1271,14 @@ def normalizeColumnNamesInDataType(
     SchemaUtils.typeExistsRecursively(schema)(_.isInstanceOf[TimestampNTZType])
   }
 
+
+  /**
+   * Returns 'true' if any VariantType exists in the table schema.
+   */
+  def checkForVariantTypeColumnsRecursively(schema: StructType): Boolean = {
+    SchemaUtils.typeExistsRecursively(schema)(VariantShims.isVariantType(_))
+  }
+
   /**
    * Find the unsupported data types in a `DataType` recursively. Add the unsupported data types to
    * the provided `unsupportedDataTypes` buffer.
@@ -1303,6 +1311,7 @@ def normalizeColumnNamesInDataType(
       case DateType =>
       case TimestampType =>
       case TimestampNTZType =>
+      case dt if VariantShims.isVariantType(dt) =>
       case BinaryType =>
       case _: DecimalType =>
       case a: ArrayType =>
```
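Since the new helper delegates to `typeExistsRecursively`, variant columns nested inside structs, arrays, or maps also trigger the table feature. A small sketch of the expected behavior, assuming a Spark 4.0 build where the `VariantType` singleton is available; the `VariantSchemaCheckSketch` object is illustrative only:

```scala
import org.apache.spark.sql.delta.schema.SchemaUtils
import org.apache.spark.sql.types._

object VariantSchemaCheckSketch {
  def main(args: Array[String]): Unit = {
    val flat = StructType(Seq(StructField("s", StringType)))
    val nested = StructType(Seq(
      StructField("payload", ArrayType(
        StructType(Seq(StructField("v", VariantType)))))))

    // No variant anywhere in the schema: the table feature is not required.
    assert(!SchemaUtils.checkForVariantTypeColumnsRecursively(flat))
    // A variant nested inside array<struct<...>> is still detected.
    assert(SchemaUtils.checkForVariantTypeColumnsRecursively(nested))
  }
}
```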

spark/src/main/scala/org/apache/spark/sql/delta/util/PartitionUtils.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -605,7 +605,8 @@ private[delta] object PartitionUtils {
 
     partitionColumnsSchema(schema, partitionColumns, caseSensitive).foreach {
       field => field.dataType match {
-        case _: AtomicType => // OK
+        // Variant types are not orderable and thus cannot be partition columns.
+        case a: AtomicType if !VariantShims.isVariantType(a) => // OK
         case _ => throw DeltaErrors.cannotUseDataTypeForPartitionColumnError(field)
       }
     }
```
New file: DeltaVariantSuite (test)

Lines changed: 129 additions & 0 deletions

```scala
/*
 * Copyright (2021) The Delta Lake Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.delta

import org.apache.spark.sql.delta.actions.Protocol
import org.apache.spark.sql.delta.actions.TableFeatureProtocolUtils
import org.apache.spark.sql.delta.test.DeltaSQLCommandTest

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.StructType

class DeltaVariantSuite
    extends QueryTest
    with SharedSparkSession
    with DeltaSQLCommandTest {

  private def getProtocolForTable(table: String): Protocol = {
    val deltaLog = DeltaLog.forTable(spark, TableIdentifier(table))
    deltaLog.unsafeVolatileSnapshot.protocol
  }

  test("create a new table with Variant, higher protocol and feature should be picked.") {
    withTable("tbl") {
      sql("CREATE TABLE tbl(s STRING, v VARIANT) USING DELTA")
      sql("INSERT INTO tbl (SELECT 'foo', parse_json(cast(id + 99 as string)) FROM range(1))")
      assert(spark.table("tbl").selectExpr("v::int").head == Row(99))
      assert(
        getProtocolForTable("tbl") ==
        VariantTypeTableFeature.minProtocolVersion.withFeature(VariantTypeTableFeature)
      )
    }
  }

  test("creating a table without Variant should use the usual minimum protocol") {
    withTable("tbl") {
      sql("CREATE TABLE tbl(s STRING, i INTEGER) USING DELTA")
      assert(getProtocolForTable("tbl") == Protocol(1, 2))

      val deltaLog = DeltaLog.forTable(spark, TableIdentifier("tbl"))
      assert(
        !deltaLog.unsafeVolatileSnapshot.protocol.isFeatureSupported(VariantTypeTableFeature),
        s"Table tbl contains VariantTypeFeature descriptor when its not supposed to"
      )
    }
  }

  test("add a new Variant column should upgrade to the correct protocol versions") {
    withTable("tbl") {
      sql("CREATE TABLE tbl(s STRING) USING delta")
      assert(getProtocolForTable("tbl") == Protocol(1, 2))

      // Should throw error
      val e = intercept[SparkThrowable] {
        sql("ALTER TABLE tbl ADD COLUMN v VARIANT")
      }
      // capture the existing protocol here.
      // we will check the error message later in this test as we need to compare the
      // expected schema and protocol
      val deltaLog = DeltaLog.forTable(spark, TableIdentifier("tbl"))
      val currentProtocol = deltaLog.unsafeVolatileSnapshot.protocol
      val currentFeatures = currentProtocol.implicitlyAndExplicitlySupportedFeatures
        .map(_.name)
        .toSeq
        .sorted
        .mkString(", ")

      // add table feature
      sql(
        s"ALTER TABLE tbl " +
        s"SET TBLPROPERTIES('delta.feature.variantType-dev' = 'supported')"
      )

      sql("ALTER TABLE tbl ADD COLUMN v VARIANT")

      // check previously thrown error message
      checkError(
        e,
        errorClass = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
        parameters = Map(
          "unsupportedFeatures" -> VariantTypeTableFeature.name,
          "supportedFeatures" -> currentFeatures
        )
      )

      sql("INSERT INTO tbl (SELECT 'foo', parse_json(cast(id + 99 as string)) FROM range(1))")
      assert(spark.table("tbl").selectExpr("v::int").head == Row(99))

      assert(
        getProtocolForTable("tbl") ==
        VariantTypeTableFeature.minProtocolVersion
          .withFeature(VariantTypeTableFeature)
          .withFeature(InvariantsTableFeature)
          .withFeature(AppendOnlyTableFeature)
      )
    }
  }

  test("VariantType may not be used as a partition column") {
    withTable("delta_test") {
      checkError(
        exception = intercept[AnalysisException] {
          sql(
            """CREATE TABLE delta_test(s STRING, v VARIANT)
              |USING delta
              |PARTITIONED BY (v)""".stripMargin)
        },
        errorClass = "INVALID_PARTITION_COLUMN_DATA_TYPE",
        parameters = Map("type" -> "\"VARIANT\"")
      )
    }
  }
}
```
