@@ -20,15 +20,15 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        brokerName = getBrokerName()
-        hdfsUser = getHdfsUser()
-        hdfsPasswd = getHdfsPasswd()
-        hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
-        externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+        def brokerName = getBrokerName()
+        def hdfsUser = getHdfsUser()
+        def hdfsPasswd = getHdfsPasswd()
+        def hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+        def externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
 
         def test_dir = "user/doris/preinstalled_data/data_case/autoinc"
 
-        def load_from_hdfs = {columns, testTable, label, testFile, format, brokerName, hdfsUser, hdfsPasswd ->
+        def load_from_hdfs = {columns, testTable, label, testFile, format ->
             def result1 = sql """ LOAD LABEL ${label} (
                 DATA INFILE("hdfs://${externalEnvIp}:${hdfs_port}/${test_dir}/${testFile}")
                 INTO TABLE ${testTable}
@@ -46,9 +46,9 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
         }
 
         def wait_for_load_result = {checklabel, testTable ->
-            max_try_milli_secs = 10000
+            def max_try_milli_secs = 10000
             while (max_try_milli_secs) {
-                result = sql "show load where label = '${checklabel}'"
+                def result = sql "show load where label = '${checklabel}'"
                 if (result[0][2] == "FINISHED") {
                     break
                 } else {
@@ -61,7 +61,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
             }
         }
 
-        table = "test_autoinc_broker_load"
+        def table = "test_autoinc_broker_load"
         sql "drop table if exists ${table}"
         sql """ CREATE TABLE IF NOT EXISTS `${table}` (
             `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
@@ -78,7 +78,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
             "enable_unique_key_merge_on_write" = "true") """
 
         def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-        load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+        load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv")
         wait_for_load_result(test_load_label, table)
         qt_sql "select * from ${table};"
         sql """insert into ${table} values(0, "Bob", 123), (2, "Tom", 323), (4, "Carter", 523);"""
@@ -102,7 +102,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
             "storage_format" = "V2",
             "enable_unique_key_merge_on_write" = "true");"""
         test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-        load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+        load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv")
         wait_for_load_result(test_load_label, table)
         sql "sync"
         qt_sql "select * from ${table};"