[SPARK-50236][SQL] Assign appropriate error condition for `_LEGACY_ERROR_TEMP_1156`: `COLUMN_NOT_DEFINED_IN_TABLE`

### What changes were proposed in this pull request?

This PR proposes to integrate `_LEGACY_ERROR_TEMP_1156` into the existing `COLUMN_NOT_DEFINED_IN_TABLE` error condition.
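
As an illustration (not part of the PR), a minimal sketch of how the new condition surfaces through the JDBC write path touched below. The H2 URL, the pre-existing `TEST.APPENDTEST` table, and its column layout are assumptions; `getCondition`, `getSqlState`, and `getMessageParameters` are the `SparkThrowable` accessors:

```scala
import java.util.Properties

import org.apache.spark.sql.{AnalysisException, SaveMode, SparkSession}

object ColumnNotDefinedDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").getOrCreate()
    import spark.implicits._

    // Assumed setup: TEST.APPENDTEST already exists with columns (NAME, ID),
    // while the DataFrame carries a SEQ column the table does not define.
    val url = "jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1"
    val df = Seq(("alice", 1, 42)).toDF("NAME", "ID", "SEQ")

    try {
      df.write.mode(SaveMode.Append).jdbc(url, "TEST.APPENDTEST", new Properties())
    } catch {
      case e: AnalysisException =>
        // After this change the failure is reported under COLUMN_NOT_DEFINED_IN_TABLE
        // (with its SQLSTATE) instead of _LEGACY_ERROR_TEMP_1156.
        println(e.getCondition)         // expected: COLUMN_NOT_DEFINED_IN_TABLE
        println(e.getSqlState)          // SQLSTATE taken from error-conditions.json
        println(e.getMessageParameters) // colType, colName, tableName, tableCols
    }
    spark.stop()
  }
}
```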

### Why are the changes needed?

To improve the error message by assigning a proper error condition and SQLSTATE.

### Does this PR introduce _any_ user-facing change?

No, only the user-facing error message is improved.

### How was this patch tested?

Updated the existing tests in `JDBCWriteSuite`.

### Was this patch authored or co-authored using generative AI tooling?

No

Closes apache#48768 from itholic/LEGACY_1156.

Authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
itholic authored and MaxGekk committed Nov 15, 2024
1 parent 11e4706 commit 007c31d
Showing 4 changed files with 30 additions and 22 deletions.
5 changes: 0 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -6400,11 +6400,6 @@
       "Partition column `<col>` not found in schema <schemaCatalog>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1156" : {
-    "message" : [
-      "Column <colName> not found in schema <tableSchema>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1158" : {
     "message" : [
       "Saving data into a view is not allowed."
@@ -1832,12 +1832,18 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
   }
 
   def columnNotFoundInSchemaError(
-      col: StructField, tableSchema: Option[StructType]): Throwable = {
+      colType: DataType,
+      colName: String,
+      tableName: String,
+      tableCols: Array[String]): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1156",
+      errorClass = "COLUMN_NOT_DEFINED_IN_TABLE",
       messageParameters = Map(
-        "colName" -> col.name,
-        "tableSchema" -> tableSchema.toString))
+        "colType" -> toSQLType(colType),
+        "colName" -> toSQLId(colName),
+        "tableName" -> toSQLId(tableName),
+        "tableCols" -> tableCols.map(toSQLId).mkString(", "))
+    )
   }
 
   def saveDataIntoViewNotAllowedError(): Throwable = {
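For context on the quoting seen in the updated test expectations further down, a hedged sketch of calling the new helper. It assumes code compiled inside the Spark project under the `org.apache.spark.sql` package (the object is `private[sql]`), and the rendered values in the comments mirror the test suite below rather than being re-derived here:

```scala
package org.apache.spark.sql.errors

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.types.StringType

// Sketch only: lives under org.apache.spark.sql.errors so that the private[sql]
// QueryCompilationErrors object is accessible.
object ColumnNotFoundErrorSketch {
  def demo(): Unit = {
    val e = QueryCompilationErrors
      .columnNotFoundInSchemaError(StringType, "NAME", "TEST.APPENDTEST", Array("NAME", "ID"))
      .asInstanceOf[AnalysisException]

    // toSQLType double-quotes the SQL type name and toSQLId backtick-quotes each
    // identifier part, so the parameters come out roughly as:
    //   colType   -> "STRING"
    //   colName   -> `NAME`
    //   tableName -> `TEST`.`APPENDTEST`
    //   tableCols -> `NAME`, `ID`
    e.getMessageParameters.forEach((k, v) => println(s"$k -> $v"))
  }
}
```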
@@ -127,7 +127,8 @@ object JdbcUtils extends Logging with SQLConfHelper {
       // RDD column names for user convenience.
       rddSchema.fields.map { col =>
         tableSchema.get.find(f => conf.resolver(f.name, col.name)).getOrElse {
-          throw QueryCompilationErrors.columnNotFoundInSchemaError(col, tableSchema)
+          throw QueryCompilationErrors.columnNotFoundInSchemaError(
+            col.dataType, col.name, table, rddSchema.fieldNames)
         }
       }
     }
@@ -191,11 +191,13 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
       exception = intercept[AnalysisException] {
         df2.write.mode(SaveMode.Append).jdbc(url, "TEST.APPENDTEST", new Properties())
       },
-      condition = "_LEGACY_ERROR_TEMP_1156",
+      condition = "COLUMN_NOT_DEFINED_IN_TABLE",
       parameters = Map(
-        "colName" -> "NAME",
-        "tableSchema" ->
-          "Some(StructType(StructField(name,StringType,true),StructField(id,IntegerType,true)))"))
+        "colType" -> "\"STRING\"",
+        "colName" -> "`NAME`",
+        "tableName" -> "`TEST`.`APPENDTEST`",
+        "tableCols" -> "`NAME`, `ID`")
+    )
   }
 
   withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
@@ -224,11 +226,13 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
         df3.write.mode(SaveMode.Overwrite).option("truncate", true)
           .jdbc(url1, "TEST.TRUNCATETEST", properties)
       },
-      condition = "_LEGACY_ERROR_TEMP_1156",
+      condition = "COLUMN_NOT_DEFINED_IN_TABLE",
       parameters = Map(
-        "colName" -> "seq",
-        "tableSchema" ->
-          "Some(StructType(StructField(name,StringType,true),StructField(id,IntegerType,true)))"))
+        "colType" -> "\"INT\"",
+        "colName" -> "`seq`",
+        "tableName" -> "`TEST`.`TRUNCATETEST`",
+        "tableCols" -> "`name`, `id`, `seq`")
+    )
     } finally {
       JdbcDialects.unregisterDialect(testH2Dialect)
       JdbcDialects.registerDialect(H2Dialect())
@@ -256,11 +260,13 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
       exception = intercept[AnalysisException] {
         df2.write.mode(SaveMode.Append).jdbc(url, "TEST.INCOMPATIBLETEST", new Properties())
       },
-      condition = "_LEGACY_ERROR_TEMP_1156",
+      condition = "COLUMN_NOT_DEFINED_IN_TABLE",
       parameters = Map(
-        "colName" -> "seq",
-        "tableSchema" ->
-          "Some(StructType(StructField(name,StringType,true),StructField(id,IntegerType,true)))"))
+        "colType" -> "\"INT\"",
+        "colName" -> "`seq`",
+        "tableName" -> "`TEST`.`INCOMPATIBLETEST`",
+        "tableCols" -> "`name`, `id`, `seq`")
+    )
   }
 
   test("INSERT to JDBC Datasource") {
