Skip to content

Commit bb79752

Browse files
test: run more JVM tests (#4026)
1 parent 29bcb75 commit bb79752

2 files changed

Lines changed: 4 additions & 14 deletions

File tree

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2401,8 +2401,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
24012401
}
24022402

24032403
test("get_struct_field - select primitive fields") {
2404-
val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
2405-
assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
24062404
withTempPath { dir =>
24072405
// create input file with Comet disabled
24082406
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2420,8 +2418,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
24202418
}
24212419

24222420
test("get_struct_field - select subset of struct") {
2423-
val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
2424-
assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
24252421
withTempPath { dir =>
24262422
// create input file with Comet disabled
24272423
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2449,8 +2445,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
24492445
}
24502446

24512447
test("get_struct_field - read entire struct") {
2452-
val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
2453-
assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
24542448
withTempPath { dir =>
24552449
// create input file with Comet disabled
24562450
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {

spark/src/test/scala/org/apache/comet/CometMathExpressionSuite.scala

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -92,8 +92,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
9292
DataGenOptions(generateNegativeZero = generateNegativeZero))
9393
}
9494

95-
// https://github.com/apache/datafusion-comet/issues/3561
96-
ignore("width_bucket") {
95+
test("width_bucket") {
9796
assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
9897
withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
9998
spark
@@ -106,8 +105,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
106105
}
107106
}
108107

109-
// https://github.com/apache/datafusion-comet/issues/3561
110-
ignore("width_bucket - edge cases") {
108+
test("width_bucket - edge cases") {
111109
assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
112110
withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
113111
spark
@@ -124,8 +122,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
124122
}
125123
}
126124

127-
// https://github.com/apache/datafusion-comet/issues/3561
128-
ignore("width_bucket - NaN values") {
125+
test("width_bucket - NaN values") {
129126
assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
130127
withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
131128
spark
@@ -137,8 +134,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
137134
}
138135
}
139136

140-
// https://github.com/apache/datafusion-comet/issues/3561
141-
ignore("width_bucket - with range data") {
137+
test("width_bucket - with range data") {
142138
assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
143139
withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
144140
spark

0 commit comments

Comments (0)