
Commit 37c1f39

alekjarmov (Alek Jarmov) authored and committed
[SPARK-52262][SQL] swap order of withConnection and classifyException in loadTable
### What changes were proposed in this pull request?

Swap the order of `withConnection` and `classifyException` in `loadTable`, since connection errors were being swallowed and reported as `FAILED_JDBC.TABLE_EXISTS`.

### Why are the changes needed?

To avoid a misleading error message.

### Does this PR introduce _any_ user-facing change?

Users no longer see the wrongly wrapped error.

### How was this patch tested?

Added a unit test and verified locally with a debugger.

### Was this patch authored or co-authored using generative AI tooling?

No

Closes #50986 from alekjarmov/remove-wrap-wrong-error.

Lead-authored-by: alekjarmov <[email protected]>
Co-authored-by: Alek Jarmov <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent 692f1b6 · commit 37c1f39
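
To see why the ordering matters, here is a minimal, self-contained sketch of the two orderings. The `classify` and `withConnection` helpers below are simplified stand-ins for illustration only, not Spark's actual `JdbcUtils` API; the stand-in connection step always fails, modeling an unreachable host.

```scala
import java.sql.SQLException

object OrderingSketch {
  // Stand-in for JdbcUtils.classifyException: relabels any failure in
  // the body under a fixed error condition.
  def classify[T](condition: String)(body: => T): T =
    try body catch {
      case e: Exception =>
        throw new RuntimeException(s"[$condition] ${e.getMessage}", e)
    }

  // Stand-in for JdbcUtils.withConnection: here the connection step
  // itself always fails, modeling an unreachable host.
  def withConnection[T](body: Unit => T): T =
    throw new SQLException("Connection refused")

  def main(args: Array[String]): Unit = {
    // Old ordering: classify wraps the connection attempt, so the
    // connection failure is relabeled as a table-existence failure.
    try classify("FAILED_JDBC.TABLE_EXISTS")(withConnection(_ => true))
    catch { case e: Exception => println(s"old: $e") }
    // old: java.lang.RuntimeException: [FAILED_JDBC.TABLE_EXISTS] Connection refused

    // New ordering: classification happens inside the connection, so
    // the connection failure surfaces unwrapped.
    try withConnection(_ => classify("FAILED_JDBC.TABLE_EXISTS")(true))
    catch { case e: Exception => println(s"new: $e") }
    // new: java.sql.SQLException: Connection refused
  }
}
```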

File tree

2 files changed: +24 −9 lines

  • connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
  • sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala


connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala

Lines changed: 15 additions & 0 deletions
```diff
@@ -1118,4 +1118,19 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
       testBinaryLiteral("<=>", greaterThanBinary, 0)
     }
   }
+
+  test("SPARK-52262: FAILED_JDBC.TABLE_EXISTS not thrown on connection error") {
+    val invalidTableName = s"$catalogName.invalid"
+    val originalUrl = spark.conf.get(s"spark.sql.catalog.$catalogName.url")
+    val invalidUrl = originalUrl.replace("localhost", "nonexistenthost")
+      .replace("127.0.0.1", "1.2.3.4")
+
+    withSQLConf(s"spark.sql.catalog.$catalogName.url" -> invalidUrl) {
+      // Ideally we would catch SQLException, but analyzer wraps it
+      val e = intercept[AnalysisException] {
+        sql(s"SELECT * FROM $invalidTableName")
+      }
+      assert(e.getCondition !== "FAILED_JDBC.TABLE_EXISTS")
+    }
+  }
 }
```
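
A note on the assertion style used above: `intercept` is ScalaTest's helper that runs a block, fails the test unless the block throws the given exception type, and returns the caught exception; `!==` is ScalaTest's TripleEquals inequality. A standalone sketch of the same pattern, with no Spark dependency (the exception type and message here are made up for illustration):

```scala
import org.scalatest.funsuite.AnyFunSuite

class InterceptSketch extends AnyFunSuite {
  test("intercept returns the caught exception for further checks") {
    // intercept fails the test if the block does not throw
    // IllegalStateException; otherwise it hands the exception back.
    val e = intercept[IllegalStateException] {
      throw new IllegalStateException("CONNECTION_REFUSED")
    }
    // Mirrors the getCondition check in the JDBC test: assert the
    // error is not the misleading condition.
    assert(e.getMessage !== "FAILED_JDBC.TABLE_EXISTS")
  }
}
```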

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala

Lines changed: 9 additions & 9 deletions
```diff
@@ -94,15 +94,15 @@ class JDBCTableCatalog extends TableCatalog
     checkNamespace(ident.namespace())
     val writeOptions = new JdbcOptionsInWrite(
       options.parameters + (JDBCOptions.JDBC_TABLE_NAME -> getTableName(ident)))
-    JdbcUtils.classifyException(
-      condition = "FAILED_JDBC.TABLE_EXISTS",
-      messageParameters = Map(
-        "url" -> options.getRedactUrl(),
-        "tableName" -> toSQLId(ident)),
-      dialect,
-      description = s"Failed table existence check: $ident",
-      isRuntime = false) {
-      JdbcUtils.withConnection(options)(JdbcUtils.tableExists(_, writeOptions))
+    JdbcUtils.withConnection(options) {
+      JdbcUtils.classifyException(
+        condition = "FAILED_JDBC.TABLE_EXISTS",
+        messageParameters = Map(
+          "url" -> options.getRedactUrl(),
+          "tableName" -> toSQLId(ident)),
+        dialect,
+        description = s"Failed table existence check: $ident",
+        isRuntime = false)(JdbcUtils.tableExists(_, writeOptions))
     }
   }
```
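
For context on the helper being reordered: `withConnection` follows the loan pattern, acquiring a JDBC connection, lending it to the body, and closing it afterwards. A generic sketch of the pattern (this is not Spark's implementation; the `url` parameter is a placeholder):

```scala
import java.sql.{Connection, DriverManager}

object WithConnectionSketch {
  // Loan pattern: acquire, lend, always release. A failure in
  // getConnection (e.g. unreachable host) propagates from here,
  // before the body, and any error classification inside it, runs.
  def withConnection[T](url: String)(body: Connection => T): T = {
    val conn = DriverManager.getConnection(url)
    try body(conn) finally conn.close()
  }
}
```

Because a `getConnection` failure propagates before the body ever runs, after the swap only exceptions thrown by the existence check on an already open connection can be relabeled as `FAILED_JDBC.TABLE_EXISTS`.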
