diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 9b50312539bca..0e54ca83f04cb 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -5713,11 +5713,6 @@
       "The value of from-to unit must be a string."
     ]
   },
-  "_LEGACY_ERROR_TEMP_0028" : {
-    "message" : [
-      "Intervals FROM <from> TO <to> are not supported."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_0029" : {
     "message" : [
       "Cannot mix year-month and day-time fields: <literal>."
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
index 71e8517a4164e..974d3ca6b2858 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
@@ -111,7 +111,14 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
       val endStr = ctx.to.getText.toLowerCase(Locale.ROOT)
       val end = YearMonthIntervalType.stringToField(endStr)
       if (end <= start) {
-        throw QueryParsingErrors.fromToIntervalUnsupportedError(startStr, endStr, ctx)
+        val intervalInput = ctx.getText()
+        val pattern = "'([^']*)'".r
+
+        val input = pattern.findFirstMatchIn(intervalInput) match {
+          case Some(m) => m.group(1)
+          case None => ""
+        }
+        throw QueryParsingErrors.fromToIntervalUnsupportedError(input, startStr, endStr, ctx)
       }
       YearMonthIntervalType(start, end)
     } else {
@@ -126,7 +133,14 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
       val endStr = ctx.to.getText.toLowerCase(Locale.ROOT)
       val end = DayTimeIntervalType.stringToField(endStr)
       if (end <= start) {
-        throw QueryParsingErrors.fromToIntervalUnsupportedError(startStr, endStr, ctx)
+        val intervalInput = ctx.getText()
+        val pattern = "'([^']*)'".r
+
+        val input = pattern.findFirstMatchIn(intervalInput) match {
+          case Some(m) => m.group(1)
+          case None => ""
+        }
+        throw QueryParsingErrors.fromToIntervalUnsupportedError(input, startStr, endStr, ctx)
       }
       DayTimeIntervalType(start, end)
     } else {
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 199a1ed868d6e..14ad10ab5b5f8 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -281,12 +281,13 @@ private[sql] object QueryParsingErrors extends DataTypeErrorsBase {
   }
 
   def fromToIntervalUnsupportedError(
+      input: String,
       from: String,
       to: String,
       ctx: ParserRuleContext): Throwable = {
     new ParseException(
-      errorClass = "_LEGACY_ERROR_TEMP_0028",
-      messageParameters = Map("from" -> from, "to" -> to),
+      errorClass = "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+      messageParameters = Map("input" -> input, "from" -> from, "to" -> to),
       ctx)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index dde2ec6f5ae64..b6168e3a9ea85 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3570,7 +3570,14 @@ class AstBuilder extends DataTypeAstBuilder
           IntervalUtils.fromDayTimeString(value,
             DayTimeIntervalType.stringToField(from), DayTimeIntervalType.stringToField(to))
         case _ =>
-          throw QueryParsingErrors.fromToIntervalUnsupportedError(from, to, ctx)
+          val intervalInput = ctx.getText()
+          val pattern = "'([^']*)'".r
+
+          val input = pattern.findFirstMatchIn(intervalInput) match {
+            case Some(m) => m.group(1)
+            case None => ""
+          }
+          throw QueryParsingErrors.fromToIntervalUnsupportedError(input, from, to, ctx)
       }
     } catch {
       // Keep error class of SparkIllegalArgumentExceptions and enrich it with query context
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 6d307d1cd9a87..fc8bcfa3f6870 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -1082,8 +1082,8 @@ class ExpressionParserSuite extends AnalysisTest {
     // Unknown FROM TO intervals
     checkError(
       exception = parseException("interval '10' month to second"),
-      condition = "_LEGACY_ERROR_TEMP_0028",
-      parameters = Map("from" -> "month", "to" -> "second"),
+      condition = "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+      parameters = Map("input" -> "10", "from" -> "month", "to" -> "second"),
       context = ExpectedContext(
         fragment = "'10' month to second",
         start = 9,
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
index b0d128c4cab69..c023e3b56f117 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out
@@ -1233,9 +1233,11 @@ select interval '1' year to second
 -- !query analysis
 org.apache.spark.sql.catalyst.parser.ParseException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_0028",
+  "errorClass" : "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+  "sqlState" : "22006",
   "messageParameters" : {
     "from" : "year",
+    "input" : "1",
     "to" : "second"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
index efa149509751d..c0196bbe118ef 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out
@@ -1233,9 +1233,11 @@ select interval '1' year to second
 -- !query analysis
 org.apache.spark.sql.catalyst.parser.ParseException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_0028",
+  "errorClass" : "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+  "sqlState" : "22006",
   "messageParameters" : {
     "from" : "year",
+    "input" : "1",
     "to" : "second"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index b2f85835eb0df..766bfba7696f0 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -1535,9 +1535,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_0028",
+  "errorClass" : "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+  "sqlState" : "22006",
   "messageParameters" : {
     "from" : "year",
+    "input" : "1",
     "to" : "second"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 5471dafaec8eb..7eed2d42da043 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -1422,9 +1422,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_0028",
+  "errorClass" : "INVALID_INTERVAL_FORMAT.UNSUPPORTED_FROM_TO_EXPRESSION",
+  "sqlState" : "22006",
   "messageParameters" : {
     "from" : "year",
+    "input" : "1",
     "to" : "second"
   },
   "queryContext" : [ {