simhadri-g commented on code in PR #4346:
URL: https://github.com/apache/hive/pull/4346#discussion_r1223240156
##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -1408,4 +1420,64 @@ Boolean hasAppendsOnly(Iterable<Snapshot> snapshots, SnapshotContext since) {
     return null;
   }
+
+  @Override
+  public List<String> showPartitions(DDLOperationContext context, org.apache.hadoop.hive.ql.metadata.Table hmstbl) {
+    Configuration confs = context.getConf();
+    Path path = new Path(hmstbl.getParameters().get("metadata_location"));
+    List<String> parts = Lists.newArrayList();
+    JobConf job = HiveTableUtil.getJobConf(confs, path, hmstbl);
+    Class<? extends InputFormat> formatter = hmstbl.getInputFormatClass();
+
+    try {
+      InputFormat inputFormat = FetchOperator.getInputFormatFromCache(formatter, job);
+      InputSplit[] splits = inputFormat.getSplits(job, 1);
+      try (RecordReader<WritableComparable, Writable> reader =
+          inputFormat.getRecordReader(splits[0], job, Reporter.NULL)) {
+        parts = getParts(context, job, reader, hmstbl);
+      }
+      return parts;
+    } catch (Exception e) {
+      LOG.warn("Warn: Unable to show partitions for iceberg table - ", e);
+    }
+    return parts;
+  }
+
+  @Override
+  public boolean supportsPartitions() {
+    return true;
+  }
+
+  private List<String> getParts(DDLOperationContext context, Configuration job,
+      RecordReader<WritableComparable, Writable> reader, org.apache.hadoop.hive.ql.metadata.Table hmstbl)
+      throws Exception {
+
+    List<String> parts = Lists.newArrayList();
+    Writable value = reader.createValue();
+    WritableComparable key = reader.createKey();
+    boolean notEoF = true;
+    String prevRow = "";
+
+    try (FetchFormatter fetcher = new DefaultFetchFormatter()) {
+      fetcher.initialize(job, HiveTableUtil.getProps());
+      org.apache.hadoop.hive.ql.metadata.Table metaDataPartTable =
+          context.getDb().getTable(hmstbl.getDbName(), hmstbl.getTableName(), "partitions", true);
+
+      while (notEoF) {
+        reader.next(key, value);
+        Deserializer currSerDe = metaDataPartTable.getDeserializer();
Review Comment:
fixed
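
A side note on the anchor line of that comment: `metaDataPartTable.getDeserializer()` is invoked on every iteration of the `while` loop even though the table handle never changes, so the lookup can be hoisted out of the loop. A minimal sketch of the hoisted loop, reusing the variables from the diff above (an illustration, not the final patch; it also uses `row[PART_IDX]` consistently where the diff mixes `row[0]` and `row[PART_IDX]`, on the assumption that both refer to the same column):

```java
// Loop-invariant: resolve the SerDe and its inspector once, before reading rows.
Deserializer serDe = metaDataPartTable.getDeserializer();
ObjectInspector inspector = serDe.getObjectInspector();

while (notEoF) {
  reader.next(key, value);
  // Format one row of the Iceberg "partitions" metadata table into tab-separated fields.
  String[] row = fetcher.convert(serDe.deserialize(value), inspector).toString().split("\t");
  if (prevRow.equalsIgnoreCase(row[PART_IDX])) {
    // The loop ends when a row repeats: the return value of next() is not
    // checked, so a repeated partition value is what signals end-of-input here.
    notEoF = false;
  } else {
    prevRow = row[PART_IDX];
    parts.add(HiveTableUtil.getParseData(row[PART_IDX], row[SPEC_IDX]));
  }
}
```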
##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -1408,4 +1420,64 @@ Boolean hasAppendsOnly(Iterable<Snapshot> snapshots, SnapshotContext since) {
     return null;
   }
+
+  @Override
+  public List<String> showPartitions(DDLOperationContext context, org.apache.hadoop.hive.ql.metadata.Table hmstbl) {
+    Configuration confs = context.getConf();
+    Path path = new Path(hmstbl.getParameters().get("metadata_location"));
+    List<String> parts = Lists.newArrayList();
+    JobConf job = HiveTableUtil.getJobConf(confs, path, hmstbl);
+    Class<? extends InputFormat> formatter = hmstbl.getInputFormatClass();
+
+    try {
+      InputFormat inputFormat = FetchOperator.getInputFormatFromCache(formatter, job);
+      InputSplit[] splits = inputFormat.getSplits(job, 1);
+      try (RecordReader<WritableComparable, Writable> reader =
+          inputFormat.getRecordReader(splits[0], job, Reporter.NULL)) {
+        parts = getParts(context, job, reader, hmstbl);
+      }
+      return parts;
+    } catch (Exception e) {
+      LOG.warn("Warn: Unable to show partitions for iceberg table - ", e);
+    }
+    return parts;
+  }
+
+  @Override
+  public boolean supportsPartitions() {
+    return true;
+  }
+
+  private List<String> getParts(DDLOperationContext context, Configuration job,
+      RecordReader<WritableComparable, Writable> reader, org.apache.hadoop.hive.ql.metadata.Table hmstbl)
+      throws Exception {
+
+    List<String> parts = Lists.newArrayList();
+    Writable value = reader.createValue();
+    WritableComparable key = reader.createKey();
+    boolean notEoF = true;
+    String prevRow = "";
+
+    try (FetchFormatter fetcher = new DefaultFetchFormatter()) {
+      fetcher.initialize(job, HiveTableUtil.getProps());
+      org.apache.hadoop.hive.ql.metadata.Table metaDataPartTable =
+          context.getDb().getTable(hmstbl.getDbName(), hmstbl.getTableName(), "partitions", true);
+
+      while (notEoF) {
+        reader.next(key, value);
+        Deserializer currSerDe = metaDataPartTable.getDeserializer();
+        String[] row =
+            fetcher.convert(currSerDe.deserialize(value), currSerDe.getObjectInspector()).toString().split("\t");
+        if (prevRow.equalsIgnoreCase(row[PART_IDX])) {
+          notEoF = false;
+        } else {
+          prevRow = row[0];
+          parts.add(HiveTableUtil.getParseData(row[PART_IDX], row[SPEC_IDX]));
+        }
+      }
+    }
+    Collections.sort(parts);
Review Comment:
Iceberg doesn't support a TreeSet; the Iceberg audit throws an exception when we use it.
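
For readers unfamiliar with that constraint: the patch collects partition strings into a plain list and sorts once at the end, which yields the same ordering a `TreeSet` would maintain incrementally (duplicates are already filtered by the `prevRow` check above). A self-contained sketch of the pattern; the partition strings are invented for illustration:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SortedPartitionList {
  public static void main(String[] args) {
    // Accumulate in a plain ArrayList instead of a TreeSet,
    // then sort once after the scan completes.
    List<String> parts = new ArrayList<>();
    parts.add("dept=hr/year=2023");   // example values only
    parts.add("dept=eng/year=2022");
    parts.add("dept=eng/year=2023");

    Collections.sort(parts);          // natural (lexicographic) order
    parts.forEach(System.out::println);
    // dept=eng/year=2022
    // dept=eng/year=2023
    // dept=hr/year=2023
  }
}
```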