Skip to content

Commit 29f8756

Browse files
committed
We don't support Spark 3.4. Version checks are now expressed as: 1. isSparkVersionGE("3.5") 2. isSparkVersionLE("3.3")
1 parent 5da604f commit 29f8756

8 files changed

+31
-37
lines changed

backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseNativeWriteTableSuite.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -603,7 +603,7 @@ class GlutenClickHouseNativeWriteTableSuite
603603
("timestamp_field", "timestamp")
604604
)
605605
def excludeTimeFieldForORC(format: String): Seq[String] = {
606-
if (format.equals("orc") && isSparkVersionGE("3.4")) {
606+
if (format.equals("orc") && isSparkVersionGE("3.5")) {
607607
// FIXME:https://github.com/apache/incubator-gluten/pull/6507
608608
fields.keys.filterNot(_.equals("timestamp_field")).toSeq
609609
} else {
@@ -913,7 +913,7 @@ class GlutenClickHouseNativeWriteTableSuite
913913
(table_name, create_sql, insert_sql)
914914
},
915915
(table_name, _) =>
916-
if (isSparkVersionGE("3.4")) {
916+
if (isSparkVersionGE("3.5")) {
917917
compareResultsAgainstVanillaSpark(
918918
s"select * from $table_name",
919919
compareResult = true,

backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHBucketSuite.scala

+11-13
Original file line numberDiff line numberDiff line change
@@ -236,8 +236,7 @@ class GlutenClickHouseTPCHBucketSuite
236236
}
237237
assert(!plans.head.asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
238238
assert(plans.head.metrics("numFiles").value === 2)
239-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
240-
assert(plans.head.metrics("pruningTime").value === pruningTimeValue)
239+
assert(plans.head.metrics("pruningTime").value === pruningTimeValueSpark)
241240
assert(plans.head.metrics("numOutputRows").value === 591673)
242241
})
243242
}
@@ -292,7 +291,7 @@ class GlutenClickHouseTPCHBucketSuite
292291
}
293292

294293
if (sparkVersion.equals("3.2")) {
295-
assert(!(plans(11).asInstanceOf[FileSourceScanExecTransformer].bucketedScan))
294+
assert(!plans(11).asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
296295
} else {
297296
assert(plans(11).asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
298297
}
@@ -328,14 +327,14 @@ class GlutenClickHouseTPCHBucketSuite
328327
.isInstanceOf[InputIteratorTransformer])
329328

330329
if (sparkVersion.equals("3.2")) {
331-
assert(!(plans(2).asInstanceOf[FileSourceScanExecTransformer].bucketedScan))
330+
assert(!plans(2).asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
332331
} else {
333332
assert(plans(2).asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
334333
}
335334
assert(plans(2).metrics("numFiles").value === 2)
336335
assert(plans(2).metrics("numOutputRows").value === 3111)
337336

338-
assert(!(plans(3).asInstanceOf[FileSourceScanExecTransformer].bucketedScan))
337+
assert(!plans(3).asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
339338
assert(plans(3).metrics("numFiles").value === 2)
340339
assert(plans(3).metrics("numOutputRows").value === 72678)
341340
})
@@ -367,12 +366,12 @@ class GlutenClickHouseTPCHBucketSuite
367366
}
368367
// bucket join
369368
assert(
370-
plans(0)
369+
plans.head
371370
.asInstanceOf[HashJoinLikeExecTransformer]
372371
.left
373372
.isInstanceOf[ProjectExecTransformer])
374373
assert(
375-
plans(0)
374+
plans.head
376375
.asInstanceOf[HashJoinLikeExecTransformer]
377376
.right
378377
.isInstanceOf[ProjectExecTransformer])
@@ -412,8 +411,7 @@ class GlutenClickHouseTPCHBucketSuite
412411
}
413412
assert(!plans.head.asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
414413
assert(plans.head.metrics("numFiles").value === 2)
415-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
416-
assert(plans.head.metrics("pruningTime").value === pruningTimeValue)
414+
assert(plans.head.metrics("pruningTime").value === pruningTimeValueSpark)
417415
assert(plans.head.metrics("numOutputRows").value === 11618)
418416
})
419417
}
@@ -427,12 +425,12 @@ class GlutenClickHouseTPCHBucketSuite
427425
}
428426
// bucket join
429427
assert(
430-
plans(0)
428+
plans.head
431429
.asInstanceOf[HashJoinLikeExecTransformer]
432430
.left
433431
.isInstanceOf[FilterExecTransformerBase])
434432
assert(
435-
plans(0)
433+
plans.head
436434
.asInstanceOf[HashJoinLikeExecTransformer]
437435
.right
438436
.isInstanceOf[ProjectExecTransformer])
@@ -587,7 +585,7 @@ class GlutenClickHouseTPCHBucketSuite
587585
def checkResult(df: DataFrame, exceptedResult: Seq[Row]): Unit = {
588586
// check the result
589587
val result = df.collect()
590-
assert(result.size == exceptedResult.size)
588+
assert(result.length == exceptedResult.size)
591589
val sortedRes = result.map {
592590
s =>
593591
Row.fromSeq(s.toSeq.map {
@@ -788,7 +786,7 @@ class GlutenClickHouseTPCHBucketSuite
788786
|order by l_orderkey, l_returnflag, t
789787
|limit 10
790788
|""".stripMargin
791-
runSql(SQL7, false)(
789+
runSql(SQL7, noFallBack = false)(
792790
df => {
793791
checkResult(
794792
df,

backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseWholeStageTransformerSuite.scala

+2
Original file line numberDiff line numberDiff line change
@@ -194,5 +194,7 @@ class GlutenClickHouseWholeStageTransformerSuite extends WholeStageTransformerSu
194194
ignore(s"[$SPARK_VERSION_SHORT]-$testName", testTag: _*)(testFun)
195195
}
196196
}
197+
198+
lazy val pruningTimeValueSpark: Int = if (isSparkVersionLE("3.3")) -1 else 0
197199
}
198200
// scalastyle:off line.size.limit

backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickhouseCountDistinctSuite.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -105,9 +105,9 @@ class GlutenClickhouseCountDistinctSuite extends GlutenClickHouseWholeStageTrans
105105
val sql = s"""
106106
select count(distinct(a,b)) , try_add(c,b) from
107107
values (0, null,1), (0,null,2), (1, 1,4) as data(a,b,c) group by try_add(c,b)
108-
""";
108+
"""
109109
val df = spark.sql(sql)
110-
WholeStageTransformerSuite.checkFallBack(df, noFallback = isSparkVersionGE("3.4"))
110+
WholeStageTransformerSuite.checkFallBack(df, noFallback = isSparkVersionGE("3.5"))
111111
}
112112

113113
test("check count distinct with filter") {

backends-clickhouse/src/test/scala/org/apache/gluten/execution/metrics/GlutenClickHouseTPCHMetricsSuite.scala

+9-11
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ import org.apache.spark.util.TaskResources
2828
import scala.collection.JavaConverters._
2929

3030
class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite {
31-
private val parquetMaxBlockSize = 4096;
31+
private val parquetMaxBlockSize = 4096
3232
override protected val needCopyParquetToTablePath = true
3333

3434
override protected val tablesPath: String = basePath + "/tpch-data"
@@ -71,8 +71,7 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
7171
assert(plans.size == 3)
7272

7373
assert(plans(2).metrics("numFiles").value === 1)
74-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
75-
assert(plans(2).metrics("pruningTime").value === pruningTimeValue)
74+
assert(plans(2).metrics("pruningTime").value === pruningTimeValueSpark)
7675
assert(plans(2).metrics("filesSize").value === 19230111)
7776

7877
assert(plans(1).metrics("numOutputRows").value === 4)
@@ -140,16 +139,15 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
140139
assert(plans.size == 3)
141140

142141
assert(plans(2).metrics("numFiles").value === 1)
143-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
144-
assert(plans(2).metrics("pruningTime").value === pruningTimeValue)
142+
assert(plans(2).metrics("pruningTime").value === pruningTimeValueSpark)
145143
assert(plans(2).metrics("filesSize").value === 19230111)
146144

147145
assert(plans(1).metrics("numOutputRows").value === 4)
148146
assert(plans(1).metrics("outputVectors").value === 1)
149147

150148
// Execute Sort operator, it will read the data twice.
151-
assert(plans(0).metrics("numOutputRows").value === 4)
152-
assert(plans(0).metrics("outputVectors").value === 1)
149+
assert(plans.head.metrics("numOutputRows").value === 4)
150+
assert(plans.head.metrics("outputVectors").value === 1)
153151
}
154152
}
155153
}
@@ -167,7 +165,7 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
167165
)
168166

169167
assert(nativeMetricsList.size == 1)
170-
val nativeMetricsData = nativeMetricsList(0)
168+
val nativeMetricsData = nativeMetricsList.head
171169
assert(nativeMetricsData.metricsDataList.size() == 3)
172170

173171
assert(nativeMetricsData.metricsDataList.get(0).getName.equals("kRead"))
@@ -289,7 +287,7 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
289287
assert(joinPlan.metrics("inputBytes").value == 1920000)
290288
}
291289

292-
val wholeStageTransformer2 = allWholeStageTransformers(0)
290+
val wholeStageTransformer2 = allWholeStageTransformers.head
293291

294292
GlutenClickHouseMetricsUTUtils.executeMetricsUpdater(
295293
wholeStageTransformer2,
@@ -327,7 +325,7 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
327325
)
328326

329327
assert(nativeMetricsList.size == 1)
330-
val nativeMetricsData = nativeMetricsList(0)
328+
val nativeMetricsData = nativeMetricsList.head
331329
assert(nativeMetricsData.metricsDataList.size() == 5)
332330

333331
assert(nativeMetricsData.metricsDataList.get(0).getName.equals("kRead"))
@@ -401,7 +399,7 @@ class GlutenClickHouseTPCHMetricsSuite extends GlutenClickHouseTPCHAbstractSuite
401399
)
402400

403401
assert(nativeMetricsListFinal.size == 1)
404-
val nativeMetricsDataFinal = nativeMetricsListFinal(0)
402+
val nativeMetricsDataFinal = nativeMetricsListFinal.head
405403
assert(nativeMetricsDataFinal.metricsDataList.size() == 3)
406404

407405
assert(nativeMetricsDataFinal.metricsDataList.get(0).getName.equals("kRead"))

backends-clickhouse/src/test/scala/org/apache/gluten/execution/tpch/GlutenClickHouseTPCHColumnarShuffleParquetAQESuite.scala

+2-4
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,7 @@ class GlutenClickHouseTPCHColumnarShuffleParquetAQESuite
6666
assert(plans.size == 5)
6767

6868
assert(plans(4).metrics("numFiles").value === 1)
69-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
70-
assert(plans(4).metrics("pruningTime").value === pruningTimeValue)
69+
assert(plans(4).metrics("pruningTime").value === pruningTimeValueSpark)
7170
assert(plans(4).metrics("filesSize").value === 19230111)
7271
assert(plans(4).metrics("numOutputRows").value === 600572)
7372

@@ -99,8 +98,7 @@ class GlutenClickHouseTPCHColumnarShuffleParquetAQESuite
9998
assert(plans.size == 3)
10099

101100
assert(plans(2).metrics("numFiles").value === 1)
102-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
103-
assert(plans(2).metrics("pruningTime").value === pruningTimeValue)
101+
assert(plans(2).metrics("pruningTime").value === pruningTimeValueSpark)
104102
assert(plans(2).metrics("filesSize").value === 19230111)
105103

106104
assert(plans(1).metrics("numInputRows").value === 591673)

backends-clickhouse/src/test/scala/org/apache/gluten/execution/tpch/GlutenClickHouseTPCHParquetBucketSuite.scala

+2-4
Original file line numberDiff line numberDiff line change
@@ -263,8 +263,7 @@ class GlutenClickHouseTPCHParquetBucketSuite
263263
}
264264
assert(!plans.head.asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
265265
assert(plans.head.metrics("numFiles").value === 4)
266-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
267-
assert(plans.head.metrics("pruningTime").value === pruningTimeValue)
266+
assert(plans.head.metrics("pruningTime").value === pruningTimeValueSpark)
268267
assert(plans.head.metrics("numOutputRows").value === 600572)
269268
}
270269
)
@@ -458,8 +457,7 @@ class GlutenClickHouseTPCHParquetBucketSuite
458457
}
459458
assert(!plans.head.asInstanceOf[FileSourceScanExecTransformer].bucketedScan)
460459
assert(plans.head.metrics("numFiles").value === 4)
461-
val pruningTimeValue = if (isSparkVersionGE("3.4")) 0 else -1
462-
assert(plans.head.metrics("pruningTime").value === pruningTimeValue)
460+
assert(plans.head.metrics("pruningTime").value === pruningTimeValueSpark)
463461
assert(plans.head.metrics("numOutputRows").value === 600572)
464462
}
465463
)

backends-clickhouse/src/test/scala/org/apache/spark/gluten/NativeWriteChecker.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ trait NativeWriteChecker
4040
override def onSuccess(funcName: String, qe: QueryExecution, duration: Long): Unit = {
4141
if (!nativeUsed) {
4242
val executedPlan = stripAQEPlan(qe.executedPlan)
43-
nativeUsed = if (isSparkVersionGE("3.4")) {
43+
nativeUsed = if (isSparkVersionGE("3.5")) {
4444
executedPlan.find(_.isInstanceOf[ColumnarWriteFilesExec]).isDefined
4545
} else {
4646
executedPlan.find(_.isInstanceOf[FakeRowAdaptor]).isDefined

0 commit comments

Comments (0)