Skip to content

Commit 58a1bee

Browse files
committed
rebase_main
1 parent 15dd820 commit 58a1bee

File tree

2 files changed

+13
-39
lines changed

2 files changed

+13
-39
lines changed

spark/src/main/scala/org/apache/comet/serde/arithmetic.scala

Lines changed: 5 additions & 25 deletions
Original file line number | Diff line number | Diff line change
@@ -88,11 +88,7 @@ trait MathBase {
8888
object CometAdd extends CometExpressionSerde[Add] with MathBase {
8989

9090
override def getSupportLevel(expr: Add): SupportLevel = {
91-
if (expr.evalMode == EvalMode.ANSI) {
92-
Incompatible(Some("ANSI mode is not supported"))
93-
} else {
94-
Compatible(None)
95-
}
91+
Compatible(None)
9692
}
9793

9894
override def convert(
@@ -118,11 +114,7 @@ object CometAdd extends CometExpressionSerde[Add] with MathBase {
118114
object CometSubtract extends CometExpressionSerde[Subtract] with MathBase {
119115

120116
override def getSupportLevel(expr: Subtract): SupportLevel = {
121-
if (expr.evalMode == EvalMode.ANSI) {
122-
Incompatible(Some("ANSI mode is not supported"))
123-
} else {
124-
Compatible(None)
125-
}
117+
Compatible(None)
126118
}
127119

128120
override def convert(
@@ -148,11 +140,7 @@ object CometSubtract extends CometExpressionSerde[Subtract] with MathBase {
148140
object CometMultiply extends CometExpressionSerde[Multiply] with MathBase {
149141

150142
override def getSupportLevel(expr: Multiply): SupportLevel = {
151-
if (expr.evalMode == EvalMode.ANSI) {
152-
Incompatible(Some("ANSI mode is not supported"))
153-
} else {
154-
Compatible(None)
155-
}
143+
Compatible(None)
156144
}
157145

158146
override def convert(
@@ -178,11 +166,7 @@ object CometMultiply extends CometExpressionSerde[Multiply] with MathBase {
178166
object CometDivide extends CometExpressionSerde[Divide] with MathBase {
179167

180168
override def getSupportLevel(expr: Divide): SupportLevel = {
181-
if (expr.evalMode == EvalMode.ANSI) {
182-
Incompatible(Some("ANSI mode is not supported"))
183-
} else {
184-
Compatible(None)
185-
}
169+
Compatible(None)
186170
}
187171

188172
override def convert(
@@ -213,11 +197,7 @@ object CometDivide extends CometExpressionSerde[Divide] with MathBase {
213197
object CometIntegralDivide extends CometExpressionSerde[IntegralDivide] with MathBase {
214198

215199
override def getSupportLevel(expr: IntegralDivide): SupportLevel = {
216-
if (expr.evalMode == EvalMode.ANSI) {
217-
Incompatible(Some("ANSI mode is not supported"))
218-
} else {
219-
Compatible(None)
220-
}
200+
Compatible(None)
221201
}
222202

223203
override def convert(

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 8 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -46,6 +46,9 @@ import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
4646
class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
4747
import testImplicits._
4848

49+
val ARITHMETIC_OVERFLOW_EXCEPTION_MSG =
50+
"""org.apache.comet.CometNativeException: [ARITHMETIC_OVERFLOW] Integer/Float overflow. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error."""
51+
4952
override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
5053
pos: Position): Unit = {
5154
super.test(testName, testTags: _*) {
@@ -395,7 +398,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
395398
}
396399

397400
test("ANSI support for add") {
398-
assume(isSpark40Plus)
399401
val data = Seq((Integer.MAX_VALUE, 1), (Integer.MIN_VALUE, -1))
400402
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
401403
withParquetTable(data, "tbl") {
@@ -408,9 +410,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
408410

409411
checkSparkMaybeThrows(res) match {
410412
case (Some(sparkExc), Some(cometExc)) =>
411-
val cometErrorPattern =
412-
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
413-
assert(cometExc.getMessage.contains(cometErrorPattern))
413+
assert(cometExc.getMessage.contains(ARITHMETIC_OVERFLOW_EXCEPTION_MSG))
414414
assert(sparkExc.getMessage.contains("overflow"))
415415
case _ => fail("Exception should be thrown")
416416
}
@@ -419,7 +419,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
419419
}
420420

421421
test("ANSI support for subtract") {
422-
assume(isSpark40Plus)
423422
val data = Seq((Integer.MIN_VALUE, 1))
424423
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
425424
withParquetTable(data, "tbl") {
@@ -430,9 +429,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
430429
| """.stripMargin)
431430
checkSparkMaybeThrows(res) match {
432431
case (Some(sparkExc), Some(cometExc)) =>
433-
val cometErrorPattern =
434-
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error."""
435-
assert(cometExc.getMessage.contains(cometErrorPattern))
432+
assert(cometExc.getMessage.contains(ARITHMETIC_OVERFLOW_EXCEPTION_MSG))
436433
assert(sparkExc.getMessage.contains("overflow"))
437434
case _ => fail("Exception should be thrown")
438435
}
@@ -441,7 +438,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
441438
}
442439

443440
test("ANSI support for multiply") {
444-
assume(isSpark40Plus)
445441
val data = Seq((Integer.MAX_VALUE, 10))
446442
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
447443
withParquetTable(data, "tbl") {
@@ -453,9 +449,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
453449

454450
checkSparkMaybeThrows(res) match {
455451
case (Some(sparkExc), Some(cometExc)) =>
456-
val cometErrorPattern =
457-
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
458-
assert(cometExc.getMessage.contains(cometErrorPattern))
452+
assert(cometExc.getMessage.contains(ARITHMETIC_OVERFLOW_EXCEPTION_MSG))
459453
assert(sparkExc.getMessage.contains("overflow"))
460454
case _ => fail("Exception should be thrown")
461455
}
@@ -464,7 +458,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
464458
}
465459

466460
test("ANSI support for divide") {
467-
assume(isSpark40Plus)
468461
val data = Seq((Integer.MIN_VALUE, 0))
469462
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
470463
withParquetTable(data, "tbl") {
@@ -473,10 +466,11 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
473466
| _1 / _2
474467
| from tbl
475468
| """.stripMargin)
469+
476470
checkSparkMaybeThrows(res) match {
477471
case (Some(sparkExc), Some(cometExc)) =>
478472
val cometErrorPattern =
479-
"""org.apache.spark.SparkArithmeticException: [DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead"""
473+
"""org.apache.comet.CometNativeException: [DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead"""
480474
assert(cometExc.getMessage.contains(cometErrorPattern))
481475
assert(sparkExc.getMessage.contains("Division by zero"))
482476
case _ => fail("Exception should be thrown")

0 commit comments

Comments (0)