mchades commented on code in PR #9460:
URL: https://github.com/apache/gravitino/pull/9460#discussion_r2612841848
##########
catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveTableOperations.java:
##########
@@ -18,132 +18,79 @@
*/
package org.apache.gravitino.catalog.hive;
+import static org.apache.gravitino.hive.HivePartition.extractPartitionValues;
+
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
-import java.util.stream.IntStream;
import org.apache.gravitino.MetadataObjects;
import org.apache.gravitino.connector.TableOperations;
import org.apache.gravitino.exceptions.NoSuchPartitionException;
-import org.apache.gravitino.exceptions.NoSuchTableException;
import org.apache.gravitino.exceptions.PartitionAlreadyExistsException;
+import org.apache.gravitino.hive.HivePartition;
+import org.apache.gravitino.hive.HiveTable;
import org.apache.gravitino.rel.SupportsPartitions;
-import org.apache.gravitino.rel.expressions.literals.Literal;
-import org.apache.gravitino.rel.expressions.literals.Literals;
-import org.apache.gravitino.rel.expressions.transforms.Transforms;
import org.apache.gravitino.rel.partitions.IdentityPartition;
import org.apache.gravitino.rel.partitions.Partition;
-import org.apache.gravitino.rel.partitions.Partitions;
-import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.UnknownTableException;
-import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HiveTableOperations implements TableOperations,
SupportsPartitions {
public static final Logger LOG =
LoggerFactory.getLogger(HiveTableOperations.class);
- private static final String PARTITION_NAME_DELIMITER = "/";
- private static final String PARTITION_VALUE_DELIMITER = "=";
-
- private final HiveTable table;
+ private final HiveTableHandle tableHandle;
- public HiveTableOperations(HiveTable table) {
- Preconditions.checkArgument(table != null, "table must not be null");
- this.table = table;
+ public HiveTableOperations(HiveTableHandle tableHandle) {
+ Preconditions.checkArgument(tableHandle != null, "table must not be null");
+ this.tableHandle = tableHandle;
}
@Override
public String[] listPartitionNames() {
try {
- return table
+ return tableHandle
.clientPool()
- .run(
- c ->
- c.listPartitionNames(table.schemaName(), table.name(),
(short) -1)
- .toArray(new String[0]));
- } catch (TException | InterruptedException e) {
+ .run(c -> c.listPartitionNames(tableHandle.table(), (short)
-1).toArray(new String[0]));
+ } catch (InterruptedException e) {
throw new RuntimeException(
- "Failed to list partition names of table " + table.name() + "from
Hive Metastore", e);
+ "Failed to list partition names of table " + tableHandle.name() +
"from Hive Metastore",
+ e);
}
}
@Override
public Partition[] listPartitions() {
- List<org.apache.hadoop.hive.metastore.api.Partition> partitions;
try {
- partitions =
- table
- .clientPool()
- .run(c -> c.listPartitions(table.schemaName(), table.name(),
(short) -1));
- } catch (TException | InterruptedException e) {
- throw new RuntimeException(e);
+ return tableHandle
+ .clientPool()
+ .run(c -> c.listPartitions(tableHandle.table(), (short) -1))
+ .toArray(new Partition[0]);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(
+ "Failed to list partitions of table " + tableHandle.name() + "from
Hive Metastore", e);
}
- List<String> partCols =
-
table.buildPartitionKeys().stream().map(FieldSchema::getName).collect(Collectors.toList());
-
- return partitions.stream()
- .map(
- partition ->
- fromHivePartition(
- FileUtils.makePartName(partCols, partition.getValues()),
partition))
- .toArray(Partition[]::new);
}
@Override
public Partition getPartition(String partitionName) throws
NoSuchPartitionException {
try {
- org.apache.hadoop.hive.metastore.api.Partition partition =
- table
- .clientPool()
- .run(c -> c.getPartition(table.schemaName(), table.name(),
partitionName));
- return fromHivePartition(partitionName, partition);
+ return tableHandle.clientPool().run(c ->
c.getPartition(tableHandle.table(), partitionName));
- } catch (UnknownTableException e) {
- throw new NoSuchTableException(
Review Comment:
I see you are no longer handling the `UnknownTableException` exception here. Is
that because the new HiveClient directly throws `NoSuchTableException` itself?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]