Commit c9d3f11

[regression-test](framework) disable defining global variable in test… (apache#45840)
1 parent 8b35b0e commit c9d3f11

92 files changed: +220 -204 lines

regression-test/data/schema_change_p0/test_uniq_vals_schema_change.out (+3)

@@ -11,6 +11,9 @@
 -- !sc --
 3
 
+-- !sc --
+3 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2
+
 -- !sc --
 4 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2

regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/ScriptSource.groovy (+8 -1)

@@ -34,7 +34,14 @@ class GroovyFileSource implements ScriptSource {
 
     @Override
     SuiteScript toScript(ScriptContext scriptContext, GroovyShell shell) {
-        SuiteScript suiteScript = shell.parse(file) as SuiteScript
+        def setPropertyFunction = '''
+\nvoid setProperty(String key, value) {
+    throw new IllegalArgumentException("defined global variables in script are not allowed: ${key}")
+}
+'''
+        def scriptContent = file.text
+        scriptContent = scriptContent + setPropertyFunction
+        SuiteScript suiteScript = shell.parse(scriptContent, file.getName()) as SuiteScript
         suiteScript.init(scriptContext)
         return suiteScript
     }
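
For context, a minimal, self-contained sketch of the mechanism this hunk relies on (the file names and strings below are illustrative, not taken from the commit): in a Groovy script, assigning to a name that was never declared with def or a type goes through the script's setProperty(), which by default stores the value in the shared Binding, i.e. it silently creates a global. Appending an overriding setProperty() to the parsed source, as GroovyFileSource now does before calling shell.parse(), turns such assignments into errors.

import groovy.lang.GroovyShell

def shell = new GroovyShell()

// A suite-like script body that defines a global: no 'def', so the assignment
// is routed through setProperty("result", ...) and lands in the binding.
def scriptBody = '''
result = "leaks into the binding"
'''

// The same kind of guard the framework appends: overriding setProperty makes
// any undeclared assignment throw instead of creating a binding variable.
def guard = '''
void setProperty(String key, value) {
    throw new IllegalArgumentException("defined global variables in script are not allowed: ${key}")
}
'''

shell.parse(scriptBody, "demo_ok.groovy").run()            // runs, pollutes the binding
try {
    shell.parse(scriptBody + guard, "demo_guarded.groovy").run()
} catch (Exception e) {
    println e.message   // defined global variables in script are not allowed: result
}

This is why the suite changes below replace bare assignments such as result = sql "..." with def result = sql "...": under the new guard they would otherwise fail at runtime.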

regression-test/suites/backup_restore/test_backup_restore_db.groovy (+1 -1)

@@ -81,7 +81,7 @@ suite("test_backup_restore_db", "backup_restore") {
     syncer.waitAllRestoreFinish(dbName)
 
     for (def tableName in tables) {
-        result = sql "SELECT * FROM ${dbName}.${tableName}"
+        def result = sql "SELECT * FROM ${dbName}.${tableName}"
         assertEquals(result.size(), numRows);
         sql "DROP TABLE ${dbName}.${tableName} FORCE"
     }

regression-test/suites/backup_restore/test_backup_restore_exclude.groovy (+1 -1)

@@ -88,7 +88,7 @@ suite("test_backup_restore_exclude", "backup_restore") {
 
     qt_select "SELECT * FROM ${dbName}.${backupExcludeTable} ORDER BY id"
     for (def tableName in tables) {
-        result = sql "SELECT * FROM ${dbName}.${tableName}"
+        def result = sql "SELECT * FROM ${dbName}.${tableName}"
         assertEquals(result.size(), numRows);
         sql "DROP TABLE ${dbName}.${tableName} FORCE"
     }

regression-test/suites/backup_restore/test_backup_restore_multi_tables.groovy (+1 -1)

@@ -84,7 +84,7 @@ suite("test_backup_restore_multi_tables", "backup_restore") {
     syncer.waitAllRestoreFinish(dbName)
 
     for (def tableName in tables) {
-        result = sql "SELECT * FROM ${dbName}.${tableName}"
+        def result = sql "SELECT * FROM ${dbName}.${tableName}"
         assertEquals(result.size(), numRows);
         sql "DROP TABLE ${dbName}.${tableName} FORCE"
     }

regression-test/suites/backup_restore/test_backup_restore_multi_tables_overwrite.groovy (+1 -1)

@@ -86,7 +86,7 @@ suite("test_backup_restore_multi_tables_overwrite", "backup_restore") {
 
     qt_select "SELECT * FROM ${dbName}.${firstTableName} ORDER BY id"
     for (def tableName in tables) {
-        result = sql "SELECT * FROM ${dbName}.${tableName}"
+        def result = sql "SELECT * FROM ${dbName}.${tableName}"
         assertEquals(result.size(), numRows);
         sql "DROP TABLE ${dbName}.${tableName} FORCE"
     }

regression-test/suites/backup_restore/test_backup_restore_mv.groovy (+5 -5)

@@ -57,13 +57,13 @@ suite("test_backup_restore_mv", "backup_restore") {
     """
 
     def alter_finished = false
-    for (i = 0; i < 60 && !alter_finished; i++) {
+    for (int i = 0; i < 60 && !alter_finished; i++) {
         result = sql_return_maparray "SHOW ALTER TABLE MATERIALIZED VIEW FROM ${dbName}"
         logger.info("result: ${result}")
-        for (int i = 0; i < result.size(); i++) {
-            if (result[i]['TableName'] == "${tableName}" &&
-                result[i]['RollupIndexName'] == "${mvName}" &&
-                result[i]['State'] == 'FINISHED') {
+        for (int j = 0; j < result.size(); j++) {
+            if (result[j]['TableName'] == "${tableName}" &&
+                result[j]['RollupIndexName'] == "${mvName}" &&
+                result[j]['State'] == 'FINISHED') {
                 alter_finished = true
                 break
             }

regression-test/suites/backup_restore/test_restore_mix_exists_and_new_table.groovy (+1 -1)

@@ -84,7 +84,7 @@ suite("test_restore_mix_exists_and_new_table", "backup_restore") {
     syncer.waitAllRestoreFinish(dbName)
 
     for (def tableName in tables) {
-        result = sql "SELECT * FROM ${dbName}.${tableName}"
+        def result = sql "SELECT * FROM ${dbName}.${tableName}"
         assertEquals(result.size(), numRows);
         sql "DROP TABLE ${dbName}.${tableName} FORCE"
     }

regression-test/suites/ccr_mow_syncer_p0/test_ingest_binlog.groovy (+1 -1)

@@ -78,7 +78,7 @@ suite("test_mow_ingest_binlog") {
     }
 
     target_sql " sync "
-    res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
+    def res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
     assertEquals(res.size(), insert_num)

regression-test/suites/ccr_syncer_p0/inverted_index/test_ingest_binlog.groovy (+1 -1)

@@ -152,7 +152,7 @@ suite("test_ingest_binlog_index") {
     }
 
     target_sql " sync "
-    res = target_sql """SELECT * FROM ${tableName}"""
+    def res = target_sql """SELECT * FROM ${tableName}"""
     if (tableName.contains("mow")) {
         assertEquals(res.size(), insert_data(tableName).size() / 2 as Integer)
     } else {

regression-test/suites/ccr_syncer_p0/test_ingest_binlog.groovy (+1 -1)

@@ -77,7 +77,7 @@ suite("test_ingest_binlog") {
     }
 
     target_sql " sync "
-    res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
+    def res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
     assertEquals(res.size(), insert_num)

regression-test/suites/cloud_p0/auth/test_disable_revoke_admin_auth.groovy (+1)

@@ -22,6 +22,7 @@ suite("test_disable_revoke_admin_auth", "cloud_auth") {
     sql """create user ${user} identified by 'Cloud12345' default role 'admin'"""
 
     sql "sync"
+    def result
 
     try {
         result = sql """revoke 'admin' from 'admin'""";

regression-test/suites/compaction/test_full_compaction.groovy (+1 -1)

@@ -120,7 +120,7 @@ suite("test_full_compaction") {
     for (def tablet in tablets) {
         String tablet_id = tablet.TabletId
         backend_id = tablet.BackendId
-        times = 1
+        def times = 1
 
         do{
             (code, out, err) = be_run_full_compaction(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), tablet_id)

regression-test/suites/correctness/test_trim_new_parameters.groovy (+1 -1)

@@ -68,6 +68,6 @@ suite("test_trim_new_parameters") {
     rtrim = sql "select rtrim('bcTTTabcabc','abc')"
     assertEquals(rtrim[0][0], 'bcTTT')
 
-    trim_one = sql "select trim('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa','a')"
+    def trim_one = sql "select trim('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa','a')"
     assertEquals(trim_one[0][0], 'baaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaab')
 }

regression-test/suites/ddl_p0/test_create_table_properties.groovy (+1 -1)

@@ -336,7 +336,7 @@ suite("test_create_table_properties") {
     )
     """
     sql """ insert into ${bool_tab} values (1, '2020-12-12 12:12:12', '2000-01-01 12:12:12.123456'), (0, '20201212 121212', '2000-01-01'), (1, '20201212121212', '2000-01-01'), (0, 'AaA', '2000-01-01') """
-    result = sql "show partitions from ${bool_tab}"
+    def result = sql "show partitions from ${bool_tab}"
     logger.info("${result}")
     assertEquals(result.size(), 2)

regression-test/suites/export/test_array_export.groovy (+3 -3)

@@ -136,7 +136,7 @@ suite("test_array_export", "export") {
     def check_export_result = {checklabel->
         max_try_milli_secs = 15000
         while(max_try_milli_secs) {
-            result = sql "show export where label='${checklabel}'"
+            def result = sql "show export where label='${checklabel}'"
             if(result[0][2] == "FINISHED") {
                 break
             } else {
@@ -171,7 +171,7 @@ suite("test_array_export", "export") {
     } else {
         throw new IllegalStateException("""${outFilePath} already exists! """)
     }
-    result = sql """
+    def result = sql """
         SELECT * FROM ${tableName} t ORDER BY k1 INTO OUTFILE "file://${outFile}/";
     """
     def url = result[0][3]
@@ -203,7 +203,7 @@ suite("test_array_export", "export") {
         path.delete();
     }
     if (csvFiles != "") {
-        cmd = "rm -rf ${csvFiles}"
+        def cmd = "rm -rf ${csvFiles}"
         sshExec("root", urlHost, cmd)
     }
 }

regression-test/suites/export/test_map_export.groovy (+2 -2)

@@ -98,7 +98,7 @@ suite("test_map_export", "export") {
     def result = sql """
         SELECT * FROM ${testTable} ORDER BY id INTO OUTFILE "file://${outFile}/";
     """
-    url = result[0][3]
+    def url = result[0][3]
     urlHost = url.substring(8, url.indexOf("${outFile}"))
     if (backends.size() > 1) {
         // custer will scp files
@@ -146,7 +146,7 @@ suite("test_map_export", "export") {
         path.delete();
     }
     if (csvFiles != "") {
-        cmd = "rm -rf ${csvFiles}"
+        def cmd = "rm -rf ${csvFiles}"
         sshExec("root", urlHost, cmd)
     }
 }

regression-test/suites/export/test_struct_export.groovy (+1 -1)

@@ -151,7 +151,7 @@ suite("test_struct_export", "export") {
         path.delete();
     }
     if (csvFiles != "") {
-        cmd = "rm -rf ${csvFiles}"
+        def cmd = "rm -rf ${csvFiles}"
         sshExec("root", urlHost, cmd)
     }
 }

regression-test/suites/export_p0/outfile/csv/test_outfile_empty_data.groovy (+7 -7)

@@ -45,7 +45,7 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
     String ak = getS3AK()
     String sk = getS3SK()
     String s3_endpoint = getS3Endpoint()
-    String region = region = getS3Region()
+    String region = getS3Region()
     String bucket = context.config.otherConfigs.get("s3BucketName");
 
     // broker
@@ -67,8 +67,8 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
         // select ... into outfile ...
        def uuid = UUID.randomUUID().toString()
 
-        hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
-        uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
+        def hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
+        def uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
 
        def res = sql """
            SELECT * FROM ${export_table_name} t ORDER BY user_id
@@ -87,8 +87,8 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
        // select ... into outfile ...
        def uuid = UUID.randomUUID().toString()
 
-        hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
-        uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
+        def hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
+        def uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
 
        def res = sql """
            SELECT * FROM ${export_table_name} t ORDER BY user_id
@@ -106,8 +106,8 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
 
    def outfile_to_S3_directly = {
        // select ... into outfile ...
-        s3_outfile_path = "${bucket}/outfile/csv/test-outfile-empty/"
-        uri = "s3://${s3_outfile_path}/exp_"
+        def s3_outfile_path = "${bucket}/outfile/csv/test-outfile-empty/"
+        def uri = "s3://${s3_outfile_path}/exp_"
 
        def res = sql """
            SELECT * FROM ${export_table_name} t ORDER BY user_id

regression-test/suites/export_p0/outfile/outfile_expr/test_outfile_expr_generate_col_name.groovy (+5 -5)

@@ -129,7 +129,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
            "s3.access_key" = "${ak}"
        );
    """
-    outfile_url = res[0][3]
+    def outfile_url = res[0][3]
 
    check_outfile_data(outfile_url, outfile_format)
    check_outfile_column_name(outfile_url, outfile_format)
@@ -150,7 +150,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
            "s3.access_key" = "${ak}"
        );
    """
-    outfile_url = res[0][3]
+    def outfile_url = res[0][3]
 
    check_outfile_data(outfile_url, outfile_format)
    check_outfile_column_name(outfile_url, outfile_format)
@@ -171,7 +171,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
            "s3.access_key" = "${ak}"
        );
    """
-    outfile_url = res[0][3]
+    def outfile_url = res[0][3]
 
    check_outfile_data(outfile_url, outfile_format)
    check_outfile_column_name(outfile_url, outfile_format)
@@ -211,7 +211,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
            "s3.access_key" = "${ak}"
        );
    """
-    outfile_url = res[0][3]
+    def outfile_url = res[0][3]
 
    check_outfile_data(outfile_url, outfile_format)
    check_outfile_column_name(outfile_url, outfile_format)
@@ -235,7 +235,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
            "s3.access_key" = "${ak}"
        );
    """
-    outfile_url = res[0][3]
+    def outfile_url = res[0][3]
 
    check_outfile_data(outfile_url, outfile_format)
    check_outfile_column_name(outfile_url, outfile_format)

regression-test/suites/export_p0/test_export_basic.groovy (+4 -4)

@@ -414,11 +414,11 @@ suite("test_export_basic", "p0") {
     }
 
     // 5. test order by and limit clause
-    uuid1 = UUID.randomUUID().toString()
+    def uuid1 = UUID.randomUUID().toString()
     outFilePath = """${outfile_path_prefix}_${uuid1}"""
-    label1 = "label_${uuid1}"
-    uuid2 = UUID.randomUUID().toString()
-    label2 = "label_${uuid2}"
+    def label1 = "label_${uuid1}"
+    def uuid2 = UUID.randomUUID().toString()
+    def label2 = "label_${uuid2}"
     try {
         // check export path
         check_path_exists.call("${outFilePath}")

regression-test/suites/export_p0/test_outfile_file_suffix.groovy (+1 -1)

@@ -42,7 +42,7 @@ suite("test_outfile_file_suffix", "p0") {
 
     def outFilePath = """s3://${bucket}/outfile_"""
     def csv_suffix_result = { file_suffix, file_format ->
-        result = sql """
+        def result = sql """
             select * from ${table_name}
             into outfile "${outFilePath}"
             FORMAT AS ${file_format}

regression-test/suites/export_p0/test_show_create_database.groovy (+1 -1)

@@ -18,7 +18,7 @@
 suite("test_show_create_database", "p0,external,hive,external_docker,external_docker_hive") {
 
     sql """create database if not exists db_test"""
-    result = sql """show create database db_test"""
+    def result = sql """show create database db_test"""
     assertEquals(result.size(), 1)
     assertEquals(result[0][1], "CREATE DATABASE `db_test`")

regression-test/suites/external_table_p0/hive/test_autoinc_broker_load.groovy (+11 -11)

@@ -20,15 +20,15 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        brokerName = getBrokerName()
-        hdfsUser = getHdfsUser()
-        hdfsPasswd = getHdfsPasswd()
-        hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
-        externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+        def brokerName = getBrokerName()
+        def hdfsUser = getHdfsUser()
+        def hdfsPasswd = getHdfsPasswd()
+        def hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+        def externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
 
        def test_dir = "user/doris/preinstalled_data/data_case/autoinc"
 
-        def load_from_hdfs = {columns, testTable, label, testFile, format, brokerName, hdfsUser, hdfsPasswd ->
+        def load_from_hdfs = {columns, testTable, label, testFile, format ->
            def result1= sql """ LOAD LABEL ${label} (
                DATA INFILE("hdfs://${externalEnvIp}:${hdfs_port}/${test_dir}/${testFile}")
                INTO TABLE ${testTable}
@@ -46,9 +46,9 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
        }
 
        def wait_for_load_result = {checklabel, testTable ->
-            max_try_milli_secs = 10000
+            def max_try_milli_secs = 10000
            while(max_try_milli_secs) {
-                result = sql "show load where label = '${checklabel}'"
+                def result = sql "show load where label = '${checklabel}'"
                if(result[0][2] == "FINISHED") {
                    break
                } else {
@@ -61,7 +61,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
            }
        }
 
-        table = "test_autoinc_broker_load"
+        def table = "test_autoinc_broker_load"
        sql "drop table if exists ${table}"
        sql """ CREATE TABLE IF NOT EXISTS `${table}` (
            `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
@@ -78,7 +78,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
            "enable_unique_key_merge_on_write" = "true") """
 
        def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-        load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+        load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv")
        wait_for_load_result(test_load_label, table)
        qt_sql "select * from ${table};"
        sql """ insert into ${table} values(0, "Bob", 123), (2, "Tom", 323), (4, "Carter", 523);"""
@@ -102,7 +102,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
            "storage_format" = "V2",
            "enable_unique_key_merge_on_write" = "true");"""
        test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-        load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+        load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv")
        wait_for_load_result(test_load_label, table)
        sql "sync"
        qt_sql "select * from ${table};"

regression-test/suites/external_table_p0/hive/test_hive_parquet_alter_column.groovy (+1 -1)

@@ -43,7 +43,7 @@ suite("test_hive_parquet_alter_column", "p0,external,hive,external_docker,extern
 
 
 
-    types = ["int","smallint","tinyint","bigint","float","double","boolean","string","char","varchar","date","timestamp","decimal"]
+    def types = ["int","smallint","tinyint","bigint","float","double","boolean","string","char","varchar","date","timestamp","decimal"]
 
     for( String type1 in types) {
        qt_desc """ desc parquet_alter_column_to_${type1} ; """
