@@ -20,12 +20,6 @@ import org.apache.gluten.GlutenConfig
 import org.apache.gluten.utils.UTSystemParameters
 
 import org.apache.spark.SparkConf
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.delta.DeltaLog
-
-import org.apache.commons.io.FileUtils
-
-import java.io.File
 
 class GlutenClickhouseFunctionSuite extends GlutenClickHouseTPCHAbstractSuite {
   override protected val needCopyParquetToTablePath = true
@@ -39,9 +33,6 @@ class GlutenClickhouseFunctionSuite extends GlutenClickHouseTPCHAbstractSuite {
     createNotNullTPCHTablesInParquet(tablesPath)
   }
 
-  private var _hiveSpark: SparkSession = _
-  override protected def spark: SparkSession = _hiveSpark
-
   override protected def sparkConf: SparkConf = {
     new SparkConf()
       .set("spark.plugins", "org.apache.gluten.GlutenPlugin")
@@ -69,70 +60,21 @@ class GlutenClickhouseFunctionSuite extends GlutenClickHouseTPCHAbstractSuite {
       .setMaster("local[1]")
   }
 
-  override protected def initializeSession(): Unit = {
-    if (_hiveSpark == null) {
-      val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
-      _hiveSpark = SparkSession
-        .builder()
-        .config(sparkConf)
-        .enableHiveSupport()
-        .config(
-          "javax.jdo.option.ConnectionURL",
-          s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true")
-        .getOrCreate()
-    }
-  }
-
-  override def beforeAll(): Unit = {
-    // prepare working paths
-    val basePathDir = new File(basePath)
-    if (basePathDir.exists()) {
-      FileUtils.forceDelete(basePathDir)
-    }
-    FileUtils.forceMkdir(basePathDir)
-    FileUtils.forceMkdir(new File(warehouse))
-    FileUtils.forceMkdir(new File(metaStorePathAbsolute))
-    FileUtils.copyDirectory(new File(rootPath + resourcePath), new File(tablesPath))
-    super.beforeAll()
-  }
-
-  override protected def afterAll(): Unit = {
-    DeltaLog.clearCache()
-
-    try {
-      super.afterAll()
-    } finally {
-      try {
-        if (_hiveSpark != null) {
-          try {
-            _hiveSpark.sessionState.catalog.reset()
-          } finally {
-            _hiveSpark.stop()
-            _hiveSpark = null
-          }
-        }
-      } finally {
-        SparkSession.clearActiveSession()
-        SparkSession.clearDefaultSession()
-      }
-    }
-  }
-
   test("test uuid - write and read") {
     withSQLConf(
       ("spark.gluten.sql.native.writer.enabled", "true"),
       (GlutenConfig.GLUTEN_ENABLED.key, "true")) {
+      withTable("uuid_test") {
+        spark.sql("create table if not exists uuid_test (id string) using parquet")
 
-      spark.sql("drop table if exists uuid_test")
-      spark.sql("create table if not exists uuid_test (id string) stored as parquet")
-
-      val df = spark.sql("select regexp_replace(uuid(), '-', '') as id from range(1)")
-      df.cache()
-      df.write.insertInto("uuid_test")
+        val df = spark.sql("select regexp_replace(uuid(), '-', '') as id from range(1)")
+        df.cache()
+        df.write.insertInto("uuid_test")
 
-      val df2 = spark.table("uuid_test")
-      val diffCount = df.exceptAll(df2).count()
-      assert(diffCount == 0)
+        val df2 = spark.table("uuid_test")
+        val diffCount = df.exceptAll(df2).count()
+        assert(diffCount == 0)
+      }
     }
   }
 
@@ -181,49 +123,51 @@ class GlutenClickhouseFunctionSuite extends GlutenClickHouseTPCHAbstractSuite {
   }
 
   test("GLUTEN-5981 null value from get_json_object") {
-    spark.sql("create table json_t1 (a string) using parquet")
-    spark.sql("insert into json_t1 values ('{\"a\":null}')")
-    runQueryAndCompare(
-      """
-        |SELECT get_json_object(a, '$.a') is null from json_t1
-        |""".stripMargin
-    )(df => checkFallbackOperators(df, 0))
-    spark.sql("drop table json_t1")
+    withTable("json_t1") {
+      spark.sql("create table json_t1 (a string) using parquet")
+      spark.sql("insert into json_t1 values ('{\"a\":null}')")
+      runQueryAndCompare(
+        """
+          |SELECT get_json_object(a, '$.a') is null from json_t1
+          |""".stripMargin
+      )(df => checkFallbackOperators(df, 0))
+    }
   }
 
   test("Fix arrayDistinct(Array(Nullable(Decimal))) core dump") {
-    val create_sql =
-      """
-        |create table if not exists test(
-        |  dec array<decimal(10, 2)>
-        |) using parquet
-        |""".stripMargin
-    val fill_sql =
-      """
-        |insert into test values(array(1, 2, null)), (array(null, 2,3, 5))
-        |""".stripMargin
-    val query_sql =
-      """
-        |select array_distinct(dec) from test;
-        |""".stripMargin
-    spark.sql(create_sql)
-    spark.sql(fill_sql)
-    compareResultsAgainstVanillaSpark(query_sql, true, { _ => })
-    spark.sql("drop table test")
+ withTable(" json_t1" ) {
+      val create_sql =
+        """
+          |create table if not exists test(
+          |  dec array<decimal(10, 2)>
+          |) using parquet
+          |""".stripMargin
+      val fill_sql =
+        """
+          |insert into test values(array(1, 2, null)), (array(null, 2,3, 5))
+          |""".stripMargin
+      val query_sql =
+        """
+          |select array_distinct(dec) from test;
+          |""".stripMargin
+      spark.sql(create_sql)
+      spark.sql(fill_sql)
+      compareResultsAgainstVanillaSpark(query_sql, true, { _ => })
+    }
   }
 
   test("intersect all") {
-    spark.sql("create table t1 (a int, b string) using parquet")
-    spark.sql("insert into t1 values (1, '1'),(2, '2'),(3, '3'),(4, '4'),(5, '5'),(6, '6')")
-    spark.sql("create table t2 (a int, b string) using parquet")
-    spark.sql("insert into t2 values (4, '4'),(5, '5'),(6, '6'),(7, '7'),(8, '8'),(9, '9')")
-    runQueryAndCompare(
-      """
-        |SELECT a,b FROM t1 INTERSECT ALL SELECT a,b FROM t2
-        |""".stripMargin
-    )(df => checkFallbackOperators(df, 0))
-    spark.sql("drop table t1")
-    spark.sql("drop table t2")
+    withTable("t1", "t2") {
+      spark.sql("create table t1 (a int, b string) using parquet")
+      spark.sql("insert into t1 values (1, '1'),(2, '2'),(3, '3'),(4, '4'),(5, '5'),(6, '6')")
+      spark.sql("create table t2 (a int, b string) using parquet")
+      spark.sql("insert into t2 values (4, '4'),(5, '5'),(6, '6'),(7, '7'),(8, '8'),(9, '9')")
+      runQueryAndCompare(
+        """
+          |SELECT a,b FROM t1 INTERSECT ALL SELECT a,b FROM t2
+          |""".stripMargin
+      )(df => checkFallbackOperators(df, 0))
+    }
   }
 
   test("array decimal32 CH column to row") {
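Note: the refactor above repeatedly swaps manual `drop table` statements for the `withTable` test helper, presumably the one Spark's `SQLTestUtils` mixes into the suite's base classes. As a rough illustration of the cleanup semantics that helper is relied on for, here is a minimal, self-contained sketch; `WithTableSketch`, its `main`, and the explicit `spark` parameter are hypothetical names for this example only, not the suite's actual API.

// Minimal sketch, assuming Spark on the classpath: run a block, then always
// drop the named tables, mirroring what SQLTestUtils.withTable provides.
import org.apache.spark.sql.SparkSession

object WithTableSketch {
  // Run `body`, then drop the tables even if `body` throws,
  // so one test's tables never leak into the next test.
  def withTable(spark: SparkSession)(tableNames: String*)(body: => Unit): Unit = {
    try body
    finally tableNames.foreach(t => spark.sql(s"DROP TABLE IF EXISTS $t"))
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("withTable-sketch").master("local[1]").getOrCreate()
    withTable(spark)("uuid_test") {
      spark.sql("create table if not exists uuid_test (id string) using parquet")
      spark.sql("insert into uuid_test select regexp_replace(uuid(), '-', '') from range(1)")
      assert(spark.table("uuid_test").count() == 1)
    }
    // uuid_test has been dropped here regardless of how the block exited.
    spark.stop()
  }
}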