Skip to content

Commit 0d2baee

Browse files
committed
HIVE-29413: Avoid code duplication by updating getPartCols method for Iceberg tables
1 parent 51e4ab0 commit 0d2baee

16 files changed

+29
-35
lines changed

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,10 +66,7 @@ private List<FieldSchema> getColumnsByPattern() throws HiveException {
 
   private List<FieldSchema> getCols() throws HiveException {
     Table table = context.getDb().getTable(desc.getTableName());
-    List<FieldSchema> allColumns = new ArrayList<>();
-    allColumns.addAll(table.getCols());
-    allColumns.addAll(table.getPartCols());
-    return allColumns;
+    return new ArrayList<>(table.getAllCols());
   }
 
   private Matcher getMatcher() {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -174,9 +174,7 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
     List<FieldSchema> partitionColumns = null;
     // TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
     if (table.isPartitioned()) {
-      partitionColumns = table.hasNonNativePartitionSupport() ?
-          table.getStorageHandler().getPartitionKeys(table) :
-          table.getPartCols();
+      partitionColumns = table.getPartCols();
     }
     if (CollectionUtils.isNotEmpty(partitionColumns) &&
         conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/PartitionUtils.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -91,9 +91,7 @@ public List<String> getValues() {
     values = new ArrayList<>();
 
     // TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
-    for (FieldSchema fs : table.hasNonNativePartitionSupport()
-        ? table.getStorageHandler().getPartitionKeys(table)
-        : table.getPartCols()) {
+    for (FieldSchema fs : table.getPartCols()) {
       String val = partSpec.get(fs.getName());
       values.add(val);
     }

ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -173,7 +173,8 @@ protected void initialize(Table table,
     // set default if location is not set and this is a physical
     // table partition (not a view partition)
     if (table.getDataLocation() != null) {
-      Path partPath = new Path(table.getDataLocation(), Warehouse.makePartName(table.getPartCols(), tPartition.getValues()));
+      Path partPath = new Path(table.getDataLocation(),
+          Warehouse.makePartName(table.getPartCols(), tPartition.getValues()));
       tPartition.getSd().setLocation(partPath.toString());
     }
   }

ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -595,6 +595,9 @@ public boolean equals(Object obj) {
   }
 
   public List<FieldSchema> getPartCols() {
+    if (hasNonNativePartitionSupport()) {
+      return getStorageHandler().getPartitionKeys(this);
+    }
     List<FieldSchema> partKeys = tTable.getPartitionKeys();
     if (partKeys == null) {
       partKeys = new ArrayList<>();
@@ -610,9 +613,7 @@ public FieldSchema getPartColByName(String colName) {
   }
 
   public List<String> getPartColNames() {
-    List<FieldSchema> partCols = hasNonNativePartitionSupport() ?
-        getStorageHandler().getPartitionKeys(this) : getPartCols();
-    return partCols.stream().map(FieldSchema::getName)
+    return getPartCols().stream().map(FieldSchema::getName)
         .collect(Collectors.toList());
   }
 

@@ -761,10 +762,16 @@ private List<FieldSchema> getColsInternal(boolean forMs) {
    * @return List&lt;FieldSchema&gt;
    */
   public List<FieldSchema> getAllCols() {
-    ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
-    f_list.addAll(getCols());
-    f_list.addAll(getPartCols());
-    return f_list;
+    List<FieldSchema> allCols = new ArrayList<>(getCols());
+    Set<String> colNames = allCols.stream()
+        .map(FieldSchema::getName)
+        .collect(Collectors.toSet());
+    for (FieldSchema col : getPartCols()) {
+      if (!colNames.contains(col.getName())) {
+        allCols.add(col);
+      }
+    }
+    return allCols;
   }
 
   public void setPartCols(List<FieldSchema> partCols) {
@@ -812,7 +819,7 @@ public void setOutputFormatClass(String name) throws HiveException {
   }
 
   public boolean isPartitioned() {
-    return hasNonNativePartitionSupport() ? getStorageHandler().isPartitioned(this) :
+    return hasNonNativePartitionSupport() ? getStorageHandler().isPartitioned(this) :
         CollectionUtils.isNotEmpty(getPartCols());
   }
 

ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -807,8 +807,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
       for (FieldNode col : cols) {
         int index = originalOutputColumnNames.indexOf(col.getFieldName());
         Table tab = cppCtx.getParseContext().getViewProjectToTableSchema().get(op);
-        List<FieldSchema> fullFieldList = new ArrayList<FieldSchema>(tab.getCols());
-        fullFieldList.addAll(tab.getPartCols());
+        List<FieldSchema> fullFieldList = new ArrayList<>(tab.getAllCols());
         cppCtx.getParseContext().getColumnAccessInfo()
             .add(tab.getCompleteName(), fullFieldList.get(index).getName());
       }

ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -175,7 +175,8 @@ private void analyzeAcidExport(ASTNode ast, Table exportTable, ASTNode tokRefOrN
     //now generate insert statement
     //insert into newTableName select * from ts <where partition spec>
     StringBuilder rewrittenQueryStr = generateExportQuery(
-        newTable.getPartCols(), tokRefOrNameExportTable, (ASTNode) tokRefOrNameExportTable.parent, newTableName);
+        newTable.getPartCols(),
+        tokRefOrNameExportTable, (ASTNode) tokRefOrNameExportTable.parent, newTableName);
     ReparseResult rr = ParseUtils.parseRewrittenQuery(ctx, rewrittenQueryStr);
     Context rewrittenCtx = rr.rewrittenCtx;
     rewrittenCtx.setIsUpdateDeleteMerge(false); //it's set in parseRewrittenQuery()

ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;

ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -193,8 +193,7 @@ private static CharSequence genPartitionClause(Table tbl, List<TransformSpec> pa
 
 
   private static String getColTypeOf(Table tbl, String partKey) {
-    for (FieldSchema fs : tbl.hasNonNativePartitionSupport() ?
-        tbl.getStorageHandler().getPartitionKeys(tbl) : tbl.getPartitionKeys()) {
+    for (FieldSchema fs : tbl.getPartCols()) {
       if (partKey.equalsIgnoreCase(fs.getName())) {
         return fs.getType().toLowerCase();
       }

0 commit comments

Comments (0)