Skip to content

Commit c750cf5

Browse files
Review comments fixes
1 parent 5677f33 commit c750cf5

File tree

17 files changed

+151
-187
lines changed

17 files changed

+151
-187
lines changed

hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727

2828
import org.apache.hadoop.fs.Path;
2929
import org.apache.hadoop.hive.conf.HiveConf;
30+
import org.apache.hadoop.hive.metastore.api.Table;
3031
import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils;
3132
import org.apache.hadoop.hive.serde2.AbstractSerDe;
3233
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -73,7 +74,7 @@ class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContaine
7374
*/
7475
public DynamicPartitionFileRecordWriterContainer(
7576
RecordWriter<? super WritableComparable<?>, ? super Writable> baseWriter,
76-
TaskAttemptContext context, org.apache.hadoop.hive.metastore.api.Table tbl)
77+
TaskAttemptContext context, Table tbl)
7778
throws IOException, InterruptedException {
7879
super(baseWriter, context);
7980
maxDynamicPartitions = jobInfo.getMaxDynamicPartitions();
@@ -91,7 +92,7 @@ public DynamicPartitionFileRecordWriterContainer(
9192
this.dynamicObjectInspectors = new HashMap<String, ObjectInspector>();
9293
this.dynamicOutputJobInfo = new HashMap<String, OutputJobInfo>();
9394
this.HIVE_DEFAULT_PARTITION_VALUE = PartitionUtils.getDefaultPartitionName(tbl.getParameters(),
94-
HiveConf.getVar(context.getConfiguration(), HiveConf.ConfVars.DEFAULT_PARTITION_NAME));
95+
context.getConfiguration());
9596
}
9697

9798
@Override

iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -877,7 +877,7 @@ public DynamicPartitionCtx createDPContext(
877877
Table table = IcebergTableUtil.getTable(conf, tableDesc.getProperties());
878878

879879
DynamicPartitionCtx dpCtx = new DynamicPartitionCtx(Maps.newLinkedHashMap(),
880-
PartitionUtils.getDefaultPartitionName(hmsTable.getParameters(), (HiveConf) conf),
880+
PartitionUtils.getDefaultPartitionName(hmsTable.getParameters(), hiveConf),
881881
hiveConf.getIntVar(ConfVars.DYNAMIC_PARTITION_MAX_PARTS_PER_NODE));
882882

883883
if (table.spec().isPartitioned() &&

parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,7 @@ alterTableStatementSuffix
8585
| alterStatementSuffixRenameBranch
8686
| alterStatementSuffixReplaceBranch
8787
| alterStatementSuffixReplaceTag
88+
| alterStatementSuffixSetDefaultPartition
8889
;
8990

9091
alterTblPartitionStatementSuffix[boolean partition]
@@ -104,7 +105,6 @@ alterTblPartitionStatementSuffix[boolean partition]
104105
| alterStatementSuffixRenameCol
105106
| alterStatementSuffixAddCol
106107
| alterStatementSuffixDropCol
107-
| alterStatementSuffixSetDefaultPartition
108108
| alterStatementSuffixUpdateColumns
109109
;
110110

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/PartitionUtils.java

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
import java.util.Map;
2525
import java.util.Set;
2626

27+
import org.apache.hadoop.conf.Configuration;
2728
import org.apache.hadoop.hive.conf.HiveConf;
2829
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
2930
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -193,16 +194,12 @@ public static void addTablePartsOutputs(Hive db, Set<WriteEntity> outputs, Table
193194
}
194195
}
195196

196-
public static String getDefaultPartitionName(Map<String, String> tableParams, HiveConf conf) {
197-
return getDefaultPartitionName(tableParams, HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULT_PARTITION_NAME));
198-
}
199-
200-
public static String getDefaultPartitionName(Map<String, String> tableParams, String defaultPartitionName) {
197+
public static String getDefaultPartitionName(Map<String, String> tableParams, Configuration conf) {
201198
// Check if the table has an override for the default partition name
202199
if (tableParams != null && tableParams.containsKey(HiveStringUtils.DEFAULT_PARTITION_NAME)) {
203200
return tableParams.get(HiveStringUtils.DEFAULT_PARTITION_NAME);
204201
} else {
205-
return defaultPartitionName;
202+
return conf.get(HiveConf.ConfVars.DEFAULT_PARTITION_NAME.varname);
206203
}
207204
}
208205
}
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/setdefaultpartition/AlterTableSetDefaultPartitionAnalyzer.java [filename reconstructed from diff content; header line lost in extraction]

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,9 +34,9 @@
3434
* Analyzer for set default partition command.
3535
*/
3636
@DDLType(types = HiveParser.TOK_ALTERTABLE_SETDEFAULTPARTITION)
37-
public class AlterTableSetDefaultPartitionAnalyser extends AbstractAlterTableAnalyzer {
37+
public class AlterTableSetDefaultPartitionAnalyzer extends AbstractAlterTableAnalyzer {
3838

39-
public AlterTableSetDefaultPartitionAnalyser(QueryState queryState) throws SemanticException {
39+
public AlterTableSetDefaultPartitionAnalyzer(QueryState queryState) throws SemanticException {
4040
super(queryState);
4141
}
4242

@@ -45,7 +45,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
4545
throws SemanticException {
4646
String tableLevelDefaultPartitionName = unescapeSQLString(command.getChild(0).getText());
4747
AlterTableSetDefaultPartitionDesc desc = new AlterTableSetDefaultPartitionDesc(tableName,
48-
tableLevelDefaultPartitionName);
48+
tableLevelDefaultPartitionName);
4949
addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
5050
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
5151
}

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/setdefaultpartition/AlterTableSetDefaultPartitionDesc.java

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -39,12 +39,7 @@ public AlterTableSetDefaultPartitionDesc(TableName tableName, String tableLevelD
3939
}
4040

4141
@Explain(displayName = "default partition name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
42-
public String tableLevelDefaultPartitionName() {
42+
public String getTableLevelDefaultPartitionName() {
4343
return tableLevelDefaultPartitionName;
4444
}
45-
46-
@Override
47-
public boolean mayNeedWriteId() {
48-
return true;
49-
}
5045
}

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/setdefaultpartition/AlterTableSetDefaultPartitionOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ public AlterTableSetDefaultPartitionOperation(DDLOperationContext context, Alter
3939
protected void doAlteration(Table table, Partition partition) throws HiveException {
4040
if (table.isPartitioned()) {
4141
// put the tableLevelDefaultPartitionName in the table property
42-
table.getParameters().put(HiveStringUtils.DEFAULT_PARTITION_NAME, desc.tableLevelDefaultPartitionName());
42+
table.getParameters().put(HiveStringUtils.DEFAULT_PARTITION_NAME, desc.getTableLevelDefaultPartitionName());
4343
} else {
4444
throw new HiveException(ErrorMsg.NON_PARTITIONED_TABLE, table.getTableName());
4545
}

ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -322,8 +322,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
322322
}
323323

324324
if (conf.getTableMetadata() != null) {
325-
defaultPartitionName = PartitionUtils.getDefaultPartitionName(conf.getTableMetadata().getParameters(),
326-
HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULT_PARTITION_NAME));
325+
defaultPartitionName = PartitionUtils.getDefaultPartitionName(conf.getTableMetadata().getParameters(), hconf);
327326
} else {
328327
// If the table metadata is not available, we cannot determine the default partition name.
329328
defaultPartitionName = HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULT_PARTITION_NAME);

ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1571,7 +1571,7 @@ public List<Partition> dropPartitions(String catName, String dbName, String tblN
15711571
for (Pair<Integer, byte[]> pair : partExprs) {
15721572
byte[] expr = pair.getRight();
15731573
String filter = generateJDOFilter(table, expr, PartitionUtils.getDefaultPartitionName(table.getParameters(),
1574-
conf.get(HiveConf.ConfVars.DEFAULT_PARTITION_NAME.varname)));
1574+
conf));
15751575
List<Partition> partitions = tt.listPartitionsByFilter(filter);
15761576
for (Partition p : partitions) {
15771577
Partition droppedPartition = tt.dropPartition(p.getValues());

ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -579,7 +579,7 @@ private static CalcitePlanner getAnalyzer(HiveConf conf, Context ctx) throws Sem
579579
*/
580580
public static Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(
581581
CommonTree ast, Table table, Configuration conf, boolean canGroupExprs) throws SemanticException {
582-
String defaultPartitionName = PartitionUtils.getDefaultPartitionName(table.getParameters(), (HiveConf)conf);
582+
String defaultPartitionName = PartitionUtils.getDefaultPartitionName(table.getParameters(), conf);
583583
Map<String, String> colTypes = new HashMap<>();
584584
List<FieldSchema> partitionKeys = table.hasNonNativePartitionSupport() ?
585585
table.getStorageHandler().getPartitionKeys(table) : table.getPartitionKeys();

0 commit comments

Comments
 (0)