Skip to content

Commit 8cb7c0b

Browse files
committed
remove debug code
1 parent 7c2b4f8 commit 8cb7c0b

File tree

4 files changed

+32
-33
lines changed

native/core/src/execution/datafusion/planner.rs

-1
Original file line numberDiff line numberDiff line change
@@ -1663,7 +1663,6 @@ impl PhysicalPlanner {
16631663
let child = self.create_expr(expr.child.as_ref().unwrap(), schema.clone())?;
16641664
let datatype = to_arrow_datatype(expr.datatype.as_ref().unwrap());
16651665
let child = Arc::new(CastExpr::new(child, datatype.clone(), None));
1666-
println!("&child: {:?}", &child);
16671666
Ok(("sum".to_string(), vec![child]))
16681667
}
16691668
other => Err(ExecutionError::GeneralError(format!(

spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala

+32
Original file line numberDiff line numberDiff line change
@@ -1503,6 +1503,38 @@ class CometExecSuite extends CometTestBase {
15031503
})
15041504
}
15051505

1506+
test("Windows decimal support") {
1507+
Seq("false").foreach(aqeEnabled =>
1508+
withSQLConf(
1509+
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
1510+
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> aqeEnabled) {
1511+
1512+
val table = "sales_data"
1513+
withTable(table) {
1514+
sql(
1515+
"CREATE TABLE sales_data ( " +
1516+
"item_id INT, " +
1517+
"item_category VARCHAR(50), " +
1518+
"item_class VARCHAR(50), " +
1519+
"sale_price DECIMAL(7,2)) using parquet")
1520+
sql(
1521+
"INSERT INTO sales_data VALUES " +
1522+
"(1, 'Sports', 'Outdoor', 300.00), " +
1523+
"(1, 'Sports', 'Outdoor', 200.00), " +
1524+
"(2, 'Books', 'Fiction', 30.00), " +
1525+
"(3, 'Home', 'Furniture', 90.00)")
1526+
1527+
val df = sql(
1528+
"SELECT " +
1529+
"item_class, " +
1530+
"1 / SUM(SUM(sale_price)) OVER () AS nested_sum " +
1531+
"FROM sales_data " +
1532+
"GROUP BY item_class")
1533+
df.rdd.collect()
1534+
}
1535+
})
1536+
}
1537+
15061538
test("Windows support") {
15071539
Seq("true", "false").foreach(aqeEnabled =>
15081540
withSQLConf(

spark/src/test/scala/org/apache/spark/sql/CometSQLQueryTestHelper.scala

-28
Original file line numberDiff line numberDiff line change
@@ -62,10 +62,6 @@ trait CometSQLQueryTestHelper {
6262

6363
val df = session.sql(sql)
6464
val schema = df.schema.catalogString
65-
// scalastyle:off println
66-
println("schema0 schema0 schema0: " + schema)
67-
println("sql sql sql: " + sql)
68-
// scalastyle:on println
6965

7066
// Get answer, but also get rid of the #1234 expression ids that show up in explain plans
7167
val answer = SQLExecution.withNewExecutionId(df.queryExecution, Some(sql)) {
@@ -88,49 +84,25 @@ trait CometSQLQueryTestHelper {
8884
result
8985
} catch {
9086
case e: SparkThrowable with Throwable if e.getErrorClass != null =>
91-
// scalastyle:off println
92-
println("1111111")
93-
e.printStackTrace()
94-
println("e.getClass.getName: " + e.getClass.getName)
95-
println("e.getMessage: " + e.getMessage)
96-
// scalastyle:on println
9787
(emptySchema, Seq(e.getClass.getName, e.getMessage))
9888
case a: AnalysisException =>
9989
// Do not output the logical plan tree which contains expression IDs.
10090
// Also implement a crude way of masking expression IDs in the error message
10191
// with a generic pattern "###".
10292

10393
val msg = a.getMessage
104-
// scalastyle:off println
105-
println("222222")
106-
a.printStackTrace()
107-
println("e.getClass.getName: " + a.getClass.getName)
108-
println("e.getMessage: " + a.getMessage)
109-
// scalastyle:on println
11094
(emptySchema, Seq(a.getClass.getName, msg.replaceAll("#\\d+", "#x")))
11195
case s: SparkException if s.getCause != null =>
11296
// For a runtime exception, it is hard to match because its message contains
11397
// information of stage, task ID, etc.
11498
// To make result matching simpler, here we match the cause of the exception if it exists.
115-
// scalastyle:off println
116-
println("3333333")
117-
s.printStackTrace()
118-
println("e.getClass.getName: " + s.getClass.getName)
119-
println("e.getMessage: " + s.getMessage)
120-
// scalastyle:on println
12199
s.getCause match {
122100
case e: SparkThrowable with Throwable if e.getErrorClass != null =>
123101
(emptySchema, Seq(e.getClass.getName, e.getMessage))
124102
case cause =>
125103
(emptySchema, Seq(cause.getClass.getName, cause.getMessage))
126104
}
127105
case NonFatal(e) =>
128-
// scalastyle:off println
129-
println("44444")
130-
e.printStackTrace()
131-
println("e.getClass.getName: " + e.getClass.getName)
132-
println("e.getMessage: " + e.getMessage)
133-
// scalastyle:on println
134106
// If there is an exception, put the exception class followed by the message.
135107
(emptySchema, Seq(e.getClass.getName, e.getMessage))
136108
}

spark/src/test/scala/org/apache/spark/sql/CometTPCDSQueryTestSuite.scala

-4
Original file line numberDiff line numberDiff line change
@@ -128,10 +128,6 @@ class CometTPCDSQueryTestSuite extends QueryTest with TPCDSBase with CometSQLQue
128128
(segments(1).trim, segments(2).replaceAll("\\s+$", ""))
129129
}
130130

131-
// scalastyle:off println
132-
println("schema schema schema: " + schema)
133-
println("expected schema expected schema expected schema: " + expectedSchema)
134-
// scalastyle:on println
135131
val notMatchedSchemaOutput = if (schema == emptySchema) {
136132
// There might be exception. See `handleExceptions`.
137133
s"Schema did not match\n$queryString\nOutput/Exception: $outputString"

0 commit comments

Comments (0)