This is an automated email from the ASF dual-hosted git repository.
ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 84155271014 HIVE-28132: Iceberg: Add support for Replace Tag. (#5222).
(Ayush Saxena, reviewed by Butao Zhang)
84155271014 is described below
commit 8415527101432bb5bf14b3c2a318a2cc40801b9a
Author: Ayush Saxena <[email protected]>
AuthorDate: Mon May 6 09:27:06 2024 +0530
HIVE-28132: Iceberg: Add support for Replace Tag. (#5222). (Ayush Saxena,
reviewed by Butao Zhang)
---
.../iceberg/mr/hive/HiveIcebergMetaHook.java | 2 +-
.../iceberg/mr/hive/HiveIcebergStorageHandler.java | 18 ++-
...BranchExec.java => IcebergSnapshotRefExec.java} | 55 +++++++-
.../org/apache/iceberg/mr/hive/IcebergTagExec.java | 67 ----------
.../mr/hive/TestHiveIcebergTagOperation.java | 45 +++++++
.../results/positive/replace_iceberg_branch.q.out | 30 ++---
.../hadoop/hive/ql/parse/AlterClauseParser.g | 14 +-
.../org/apache/hadoop/hive/ql/parse/HiveParser.g | 2 +-
.../hadoop/hive/ql/ddl/table/AlterTableType.java | 2 +-
.../AlterTableReplaceSnapshotRefAnalyzer.java | 141 +++++++++++++++++++++
.../AlterTableReplaceBranchRefAnalyzer.java | 105 ---------------
.../hive/ql/parse/AlterTableSnapshotRefSpec.java | 26 ++--
.../apache/hadoop/hive/ql/plan/HiveOperation.java | 2 +-
.../authorization/plugin/HiveOperationType.java | 2 +-
.../plugin/sqlstd/Operation2Privilege.java | 2 +-
15 files changed, 297 insertions(+), 216 deletions(-)
diff --git
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
index 90b32603654..b27ab64d512 100644
---
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
+++
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
@@ -153,7 +153,7 @@ public class HiveIcebergMetaHook implements HiveMetaHook {
AlterTableType.ADDPROPS, AlterTableType.DROPPROPS,
AlterTableType.SETPARTITIONSPEC,
AlterTableType.UPDATE_COLUMNS, AlterTableType.RENAME,
AlterTableType.EXECUTE, AlterTableType.CREATE_BRANCH,
AlterTableType.CREATE_TAG, AlterTableType.DROP_BRANCH,
AlterTableType.RENAME_BRANCH, AlterTableType.DROPPARTITION,
- AlterTableType.DROP_TAG, AlterTableType.COMPACT,
AlterTableType.REPLACE_BRANCH);
+ AlterTableType.DROP_TAG, AlterTableType.COMPACT,
AlterTableType.REPLACE_SNAPSHOTREF);
private static final List<String> MIGRATION_ALLOWED_SOURCE_FORMATS =
ImmutableList.of(
FileFormat.PARQUET.name().toLowerCase(),
FileFormat.ORC.name().toLowerCase(),
diff --git
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
index fbc8ec2e01a..5e6d545b3ca 100644
---
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
+++
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
@@ -989,7 +989,7 @@ public class HiveIcebergStorageHandler implements
HiveStoragePredicateHandler, H
case CREATE_BRANCH:
AlterTableSnapshotRefSpec.CreateSnapshotRefSpec createBranchSpec =
(AlterTableSnapshotRefSpec.CreateSnapshotRefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergBranchExec.createBranch(icebergTable, createBranchSpec);
+ IcebergSnapshotRefExec.createBranch(icebergTable, createBranchSpec);
break;
case CREATE_TAG:
Optional.ofNullable(icebergTable.currentSnapshot()).orElseThrow(() ->
new UnsupportedOperationException(
@@ -998,27 +998,31 @@ public class HiveIcebergStorageHandler implements
HiveStoragePredicateHandler, H
hmsTable.getTableName())));
AlterTableSnapshotRefSpec.CreateSnapshotRefSpec createTagSpec =
(AlterTableSnapshotRefSpec.CreateSnapshotRefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergTagExec.createTag(icebergTable, createTagSpec);
+ IcebergSnapshotRefExec.createTag(icebergTable, createTagSpec);
break;
case DROP_BRANCH:
AlterTableSnapshotRefSpec.DropSnapshotRefSpec dropBranchSpec =
(AlterTableSnapshotRefSpec.DropSnapshotRefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergBranchExec.dropBranch(icebergTable, dropBranchSpec);
+ IcebergSnapshotRefExec.dropBranch(icebergTable, dropBranchSpec);
break;
case RENAME_BRANCH:
AlterTableSnapshotRefSpec.RenameSnapshotrefSpec renameSnapshotrefSpec =
(AlterTableSnapshotRefSpec.RenameSnapshotrefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergBranchExec.renameBranch(icebergTable, renameSnapshotrefSpec);
+ IcebergSnapshotRefExec.renameBranch(icebergTable,
renameSnapshotrefSpec);
break;
- case REPLACE_BRANCH:
+ case REPLACE_SNAPSHOTREF:
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec
replaceSnapshotrefSpec =
(AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergBranchExec.replaceBranch(icebergTable, replaceSnapshotrefSpec);
+ if (replaceSnapshotrefSpec.isReplaceBranch()) {
+ IcebergSnapshotRefExec.replaceBranch(icebergTable,
replaceSnapshotrefSpec);
+ } else {
+ IcebergSnapshotRefExec.replaceTag(icebergTable,
replaceSnapshotrefSpec);
+ }
break;
case DROP_TAG:
AlterTableSnapshotRefSpec.DropSnapshotRefSpec dropTagSpec =
(AlterTableSnapshotRefSpec.DropSnapshotRefSpec)
alterTableSnapshotRefSpec.getOperationParams();
- IcebergTagExec.dropTag(icebergTable, dropTagSpec);
+ IcebergSnapshotRefExec.dropTag(icebergTable, dropTagSpec);
break;
default:
throw new UnsupportedOperationException(String.format(
diff --git
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergBranchExec.java
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergSnapshotRefExec.java
similarity index 70%
rename from
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergBranchExec.java
rename to
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergSnapshotRefExec.java
index 8f54aef4aa0..6332cbb767f 100644
---
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergBranchExec.java
+++
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergSnapshotRefExec.java
@@ -28,11 +28,11 @@ import org.apache.iceberg.util.SnapshotUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public class IcebergBranchExec {
+public class IcebergSnapshotRefExec {
- private static final Logger LOG =
LoggerFactory.getLogger(IcebergBranchExec.class);
+ private static final Logger LOG =
LoggerFactory.getLogger(IcebergSnapshotRefExec.class);
- private IcebergBranchExec() {
+ private IcebergSnapshotRefExec() {
}
/**
@@ -100,7 +100,7 @@ public class IcebergBranchExec {
public static void replaceBranch(Table table,
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec)
{
- String sourceBranch = replaceSnapshotrefSpec.getSourceBranchName();
+ String sourceBranch = replaceSnapshotrefSpec.getSourceRefName();
ManageSnapshots manageSnapshots;
if (replaceSnapshotrefSpec.isReplaceBySnapshot()) {
long targetSnapshot = replaceSnapshotrefSpec.getTargetSnapshot();
@@ -111,6 +111,52 @@ public class IcebergBranchExec {
LOG.info("Replacing branch {} with branch {} on iceberg table {}",
sourceBranch, targetBranch, table.name());
manageSnapshots = table.manageSnapshots().replaceBranch(sourceBranch,
targetBranch);
}
+ setOptionalReplaceParams(replaceSnapshotrefSpec, manageSnapshots,
sourceBranch);
+ manageSnapshots.commit();
+ }
+
+ public static void createTag(Table table,
AlterTableSnapshotRefSpec.CreateSnapshotRefSpec createTagSpec) {
+ String tagName = createTagSpec.getRefName();
+ Long snapshotId = null;
+ if (createTagSpec.getSnapshotId() != null) {
+ snapshotId = createTagSpec.getSnapshotId();
+ } else if (createTagSpec.getAsOfTime() != null) {
+ snapshotId = SnapshotUtil.snapshotIdAsOfTime(table,
createTagSpec.getAsOfTime());
+ } else {
+ snapshotId = table.currentSnapshot().snapshotId();
+ }
+ LOG.info("Creating tag {} on iceberg table {} with snapshotId {}",
tagName, table.name(), snapshotId);
+ ManageSnapshots manageSnapshots = table.manageSnapshots();
+ manageSnapshots.createTag(tagName, snapshotId);
+ if (createTagSpec.getMaxRefAgeMs() != null) {
+ manageSnapshots.setMaxRefAgeMs(tagName, createTagSpec.getMaxRefAgeMs());
+ }
+
+ manageSnapshots.commit();
+ }
+
+ public static void dropTag(Table table,
AlterTableSnapshotRefSpec.DropSnapshotRefSpec dropTagSpec) {
+ String tagName = dropTagSpec.getRefName();
+ boolean ifExists = dropTagSpec.getIfExists();
+
+ SnapshotRef snapshotRef = table.refs().get(tagName);
+ if (snapshotRef != null || !ifExists) {
+ LOG.info("Dropping tag {} on iceberg table {}", tagName, table.name());
+ table.manageSnapshots().removeTag(tagName).commit();
+ }
+ }
+
+ public static void replaceTag(Table table,
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceTagRefSpec) {
+ String sourceTag = replaceTagRefSpec.getSourceRefName();
+ long targetSnapshot = replaceTagRefSpec.getTargetSnapshot();
+ LOG.info("Replacing tag {} with snapshot {} on iceberg table {}",
sourceTag, targetSnapshot, table.name());
+ ManageSnapshots manageSnapshots =
table.manageSnapshots().replaceTag(sourceTag, targetSnapshot);
+ setOptionalReplaceParams(replaceTagRefSpec, manageSnapshots, sourceTag);
+ manageSnapshots.commit();
+ }
+
+ static void
setOptionalReplaceParams(AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec
replaceSnapshotrefSpec,
+ ManageSnapshots manageSnapshots, String sourceBranch) {
if (replaceSnapshotrefSpec.getMaxRefAgeMs() > 0) {
manageSnapshots.setMaxRefAgeMs(sourceBranch,
replaceSnapshotrefSpec.getMaxRefAgeMs());
}
@@ -120,6 +166,5 @@ public class IcebergBranchExec {
if (replaceSnapshotrefSpec.getMinSnapshotsToKeep() > 0) {
manageSnapshots.setMinSnapshotsToKeep(sourceBranch,
replaceSnapshotrefSpec.getMinSnapshotsToKeep());
}
- manageSnapshots.commit();
}
}
diff --git
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTagExec.java
b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTagExec.java
deleted file mode 100644
index 54e386af225..00000000000
---
a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTagExec.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iceberg.mr.hive;
-
-import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec;
-import org.apache.iceberg.ManageSnapshots;
-import org.apache.iceberg.SnapshotRef;
-import org.apache.iceberg.Table;
-import org.apache.iceberg.util.SnapshotUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class IcebergTagExec {
-
- private static final Logger LOG =
LoggerFactory.getLogger(IcebergTagExec.class);
-
- private IcebergTagExec() {
- }
-
- public static void createTag(Table table,
AlterTableSnapshotRefSpec.CreateSnapshotRefSpec createTagSpec) {
- String tagName = createTagSpec.getRefName();
- Long snapshotId = null;
- if (createTagSpec.getSnapshotId() != null) {
- snapshotId = createTagSpec.getSnapshotId();
- } else if (createTagSpec.getAsOfTime() != null) {
- snapshotId = SnapshotUtil.snapshotIdAsOfTime(table,
createTagSpec.getAsOfTime());
- } else {
- snapshotId = table.currentSnapshot().snapshotId();
- }
- LOG.info("Creating tag {} on iceberg table {} with snapshotId {}",
tagName, table.name(), snapshotId);
- ManageSnapshots manageSnapshots = table.manageSnapshots();
- manageSnapshots.createTag(tagName, snapshotId);
- if (createTagSpec.getMaxRefAgeMs() != null) {
- manageSnapshots.setMaxRefAgeMs(tagName, createTagSpec.getMaxRefAgeMs());
- }
-
- manageSnapshots.commit();
- }
-
- public static void dropTag(Table table,
AlterTableSnapshotRefSpec.DropSnapshotRefSpec dropTagSpec) {
- String tagName = dropTagSpec.getRefName();
- boolean ifExists = dropTagSpec.getIfExists();
-
- SnapshotRef snapshotRef = table.refs().get(tagName);
- if (snapshotRef != null || !ifExists) {
- LOG.info("Dropping tag {} on iceberg table {}", tagName, table.name());
- table.manageSnapshots().removeTag(tagName).commit();
- }
- }
-}
diff --git
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTagOperation.java
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTagOperation.java
index 915a9a83641..36b4b00ee7c 100644
---
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTagOperation.java
+++
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergTagOperation.java
@@ -25,6 +25,8 @@ import java.util.concurrent.TimeUnit;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.SnapshotRef;
import org.apache.iceberg.Table;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
@@ -209,4 +211,47 @@ public class TestHiveIcebergTagOperation extends
HiveIcebergStorageHandlerWithEn
Assert.assertTrue(e.getMessage().contains("Tag does not exist:
test_tag_1"));
}
}
+
+ @Test
+ public void testReplaceTag() {
+ TableIdentifier identifier = TableIdentifier.of("default",
"testReplaceTag");
+ shell.executeStatement(
+ String.format("CREATE EXTERNAL TABLE %s (id INT) STORED BY iceberg %s
%s",
+ identifier.name(),
+ testTables.locationForCreateTableSQL(identifier),
+ testTables.propertiesForCreateTableSQL(ImmutableMap.of())));
+
+ shell.executeStatement(String.format("INSERT INTO TABLE %s
VALUES(1),(2),(3),(4)", identifier.name()));
+
+ org.apache.iceberg.Table icebergTable = testTables.loadTable(identifier);
+ icebergTable.refresh();
+ long id1 = icebergTable.currentSnapshot().snapshotId();
+ // Create a tag
+ shell.executeStatement(String.format("ALTER TABLE %s create tag tag1",
identifier.name()));
+ // Make one new insert to the main branch
+ shell.executeStatement(String.format("INSERT INTO TABLE %s VALUES(5),(6)",
identifier.name()));
+ icebergTable.refresh();
+ long id2 = icebergTable.currentSnapshot().snapshotId();
+
+ // Make another insert so that the commit isn't the last commit on the
branch
+ shell.executeStatement(String.format("INSERT INTO TABLE %s VALUES(7),(8)",
identifier.name()));
+
+ // Validate the original count on branch before replace
+ List<Object[]> result =
+ shell.executeStatement("SELECT COUNT(*) FROM
default.testReplaceTag.tag_tag1");
+ Assert.assertEquals(4L, result.get(0)[0]);
+ // Perform replace tag with snapshot id.
+ shell.executeStatement(
+ String.format("ALTER TABLE %s replace tag tag1 as of system_version
%s", identifier.name(), id2));
+ result = shell.executeStatement("SELECT COUNT(*) FROM
default.testReplaceTag.tag_tag1");
+ Assert.assertEquals(6L, result.get(0)[0]);
+
+ // Perform replace tag with retain
+ shell.executeStatement(
+ String.format("ALTER TABLE %s replace tag tag1 as of system_version %s
retain 2 days", identifier.name(), id1));
+ result = shell.executeStatement("SELECT COUNT(*) FROM
default.testReplaceTag.tag_tag1");
+ Assert.assertEquals(4L, result.get(0)[0]);
+ icebergTable.refresh();
+ Assert.assertEquals(TimeUnit.DAYS.toMillis(2),
icebergTable.refs().get("tag1").maxRefAgeMs().longValue());
+ }
}
diff --git
a/iceberg/iceberg-handler/src/test/results/positive/replace_iceberg_branch.q.out
b/iceberg/iceberg-handler/src/test/results/positive/replace_iceberg_branch.q.out
index 1d964903e22..6b2173d3c93 100644
---
a/iceberg/iceberg-handler/src/test/results/positive/replace_iceberg_branch.q.out
+++
b/iceberg/iceberg-handler/src/test/results/positive/replace_iceberg_branch.q.out
@@ -90,10 +90,10 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
4
44
PREHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch2
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch2
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -102,13 +102,13 @@ STAGE PLANS:
Stage: Stage-0
SnapshotRef Operation
table name: default.ice01
- spec: AlterTableSnapshotRefSpec{operationType=REPLACE_BRANCH,
operationParams=ReplaceSnapshotrefSpec{sourceBranch=branch1,
targetBranch=branch2}}
+ spec: AlterTableSnapshotRefSpec{operationType=REPLACE_SNAPSHOTREF,
operationParams=ReplaceSnapshotrefSpec{sourceRef=branch1, replace=Branch,
targetBranch=branch2}}
PREHOOK: query: alter table ice01 replace branch branch1 as of branch branch2
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: alter table ice01 replace branch branch1 as of branch branch2
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
PREHOOK: query: select * from default.ice01.branch_branch1
PREHOOK: type: QUERY
@@ -138,10 +138,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@ice01
PREHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch3 retain 5 days
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch3 retain 5 days
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -150,13 +150,13 @@ STAGE PLANS:
Stage: Stage-0
SnapshotRef Operation
table name: default.ice01
- spec: AlterTableSnapshotRefSpec{operationType=REPLACE_BRANCH,
operationParams=ReplaceSnapshotrefSpec{sourceBranch=branch1,
targetBranch=branch3, maxRefAgeMs=432000000}}
+ spec: AlterTableSnapshotRefSpec{operationType=REPLACE_SNAPSHOTREF,
operationParams=ReplaceSnapshotrefSpec{sourceRef=branch1, replace=Branch,
targetBranch=branch3, maxRefAgeMs=432000000}}
PREHOOK: query: alter table ice01 replace branch branch1 as of branch branch3
retain 5 days
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: alter table ice01 replace branch branch1 as of branch branch3
retain 5 days
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
PREHOOK: query: select * from default.ice01.branch_branch1
PREHOOK: type: QUERY
@@ -187,10 +187,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@ice01
PREHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch4 with snapshot retention 5 snapshots 6 days
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: explain alter table ice01 replace branch branch1 as of branch
branch4 with snapshot retention 5 snapshots 6 days
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -199,13 +199,13 @@ STAGE PLANS:
Stage: Stage-0
SnapshotRef Operation
table name: default.ice01
- spec: AlterTableSnapshotRefSpec{operationType=REPLACE_BRANCH,
operationParams=ReplaceSnapshotrefSpec{sourceBranch=branch1,
targetBranch=branch4, minSnapshotsToKeep=5, maxSnapshotAgeMs=518400000}}
+ spec: AlterTableSnapshotRefSpec{operationType=REPLACE_SNAPSHOTREF,
operationParams=ReplaceSnapshotrefSpec{sourceRef=branch1, replace=Branch,
targetBranch=branch4, minSnapshotsToKeep=5, maxSnapshotAgeMs=518400000}}
PREHOOK: query: alter table ice01 replace branch branch1 as of branch branch4
with snapshot retention 5 snapshots 6 days
-PREHOOK: type: ALTERTABLE_REPLACEBRANCH
+PREHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
PREHOOK: Input: default@ice01
POSTHOOK: query: alter table ice01 replace branch branch1 as of branch branch4
with snapshot retention 5 snapshots 6 days
-POSTHOOK: type: ALTERTABLE_REPLACEBRANCH
+POSTHOOK: type: ALTERTABLE_REPLACESNAPSHOTREF
POSTHOOK: Input: default@ice01
PREHOOK: query: select * from default.ice01.branch_branch1
PREHOOK: type: QUERY
diff --git
a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index c6bbcfca385..5d3c837b0bd 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -82,6 +82,7 @@ alterTableStatementSuffix
| alterStatementSuffixConvert
| alterStatementSuffixRenameBranch
| alterStatementSuffixReplaceBranch
+ | alterStatementSuffixReplaceTag
;
alterTblPartitionStatementSuffix[boolean partition]
@@ -102,13 +103,13 @@ alterTblPartitionStatementSuffix[boolean partition]
| alterStatementSuffixAddCol
| alterStatementSuffixUpdateColumns
;
-
+
optimizeTableStatementSuffix
@init { gParent.pushMsg("optimize table statement suffix", state); }
@after { gParent.popMsg(state); }
: optimizeTblRewriteDataSuffix
;
-
+
optimizeTblRewriteDataSuffix
@init { gParent.msgs.push("compaction request"); }
@after { gParent.msgs.pop(); }
@@ -518,7 +519,14 @@ alterStatementSuffixReplaceBranch
@init { gParent.pushMsg("alter table replace branch", state); }
@after { gParent.popMsg(state); }
: KW_REPLACE KW_BRANCH sourceBranch=Identifier KW_AS KW_OF
((KW_SYSTEM_VERSION snapshotId=Number) | (KW_BRANCH branch=identifier))
refRetain? retentionOfSnapshots?
- -> ^(TOK_ALTERTABLE_REPLACE_BRANCH $sourceBranch KW_SYSTEM_VERSION?
$snapshotId? $branch? refRetain? retentionOfSnapshots?)
+ -> ^(TOK_ALTERTABLE_REPLACE_SNAPSHOTREF KW_BRANCH $sourceBranch
KW_SYSTEM_VERSION? $snapshotId? $branch? refRetain? retentionOfSnapshots?)
+ ;
+
+alterStatementSuffixReplaceTag
+@init { gParent.pushMsg("alter table replace tag", state); }
+@after { gParent.popMsg(state); }
+ : KW_REPLACE KW_TAG sourceBranch=Identifier KW_AS KW_OF KW_SYSTEM_VERSION
snapshotId=Number refRetain?
+ -> ^(TOK_ALTERTABLE_REPLACE_SNAPSHOTREF KW_TAG $sourceBranch $snapshotId
refRetain?)
;
alterStatementSuffixDropBranch
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index de6cf2c5efd..745c437d41b 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -222,9 +222,9 @@ TOK_ALTERTABLE_EXECUTE;
TOK_ALTERTABLE_CREATE_BRANCH;
TOK_ALTERTABLE_DROP_BRANCH;
TOK_ALTERTABLE_RENAME_BRANCH;
-TOK_ALTERTABLE_REPLACE_BRANCH;
TOK_ALTERTABLE_CREATE_TAG;
TOK_ALTERTABLE_DROP_TAG;
+TOK_ALTERTABLE_REPLACE_SNAPSHOTREF;
TOK_RETAIN;
TOK_WITH_SNAPSHOT_RETENTION;
TOK_ALTERTABLE_CONVERT;
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableType.java
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableType.java
index d0d104e0712..0926c6aa53a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableType.java
@@ -43,9 +43,9 @@ public enum AlterTableType {
CREATE_BRANCH("create branch"),
DROP_BRANCH("drop branch"),
RENAME_BRANCH("rename branch"),
- REPLACE_BRANCH("replace branch"),
CREATE_TAG("create tag"),
DROP_TAG("drop tag"),
+ REPLACE_SNAPSHOTREF("replace branch/tag"),
// constraint
ADD_CONSTRAINT("add constraint"),
DROP_CONSTRAINT("drop constraint"),
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/AlterTableReplaceSnapshotRefAnalyzer.java
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/AlterTableReplaceSnapshotRefAnalyzer.java
new file mode 100644
index 00000000000..f8ed734ef58
--- /dev/null
+++
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/AlterTableReplaceSnapshotRefAnalyzer.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.ddl.table.snapshotref;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.jetbrains.annotations.NotNull;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import static
org.apache.hadoop.hive.ql.parse.HiveParser_AlterClauseParser.KW_BRANCH;
+import static
org.apache.hadoop.hive.ql.parse.HiveParser_AlterClauseParser.KW_SYSTEM_VERSION;
+
[email protected](types =
HiveParser.TOK_ALTERTABLE_REPLACE_SNAPSHOTREF)
+public class AlterTableReplaceSnapshotRefAnalyzer extends
AbstractAlterTableAnalyzer {
+
+ protected AlterTableType alterTableType;
+
+ public AlterTableReplaceSnapshotRefAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ alterTableType = AlterTableType.REPLACE_SNAPSHOTREF;
+ }
+
+ @Override
+ protected void analyzeCommand(TableName tableName, Map<String, String>
partitionSpec, ASTNode command)
+ throws SemanticException {
+ Table table = getTable(tableName);
+ DDLUtils.validateTableIsIceberg(table);
+ inputs.add(new ReadEntity(table));
+ validateAlterTableType(table, alterTableType, false);
+ AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec;
+ boolean isReplaceBranch = command.getChild(0).getType() == KW_BRANCH;
+ String sourceRef = command.getChild(1).getText();
+ if (isReplaceBranch) {
+ replaceSnapshotrefSpec = getReplaceBranchSpec(command, sourceRef);
+ replaceSnapshotrefSpec.setIsReplaceBranch();
+ } else {
+ replaceSnapshotrefSpec = getReplaceTagSpec(command, sourceRef);
+ }
+
+
AlterTableSnapshotRefSpec<AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec>
alterTableSnapshotRefSpec =
+ new AlterTableSnapshotRefSpec<>(alterTableType,
replaceSnapshotrefSpec);
+ AbstractAlterTableDesc alterTableDesc =
+ new AlterTableSnapshotRefDesc(alterTableType, tableName,
alterTableSnapshotRefSpec);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
alterTableDesc)));
+ }
+
+ @NotNull
+ private AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec
getReplaceTagSpec(ASTNode command, String sourceBranch)
+ throws SemanticException {
+ AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec;
+ long targetSnapshot = Long.parseLong(command.getChild(2).getText());
+ replaceSnapshotrefSpec = new
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec(sourceBranch, targetSnapshot);
+
+ for (int childNodeNum = 3; childNodeNum < command.getChildCount();
childNodeNum++) {
+ AlterTableReplaceSnapshotRefAnalyzer.extractOptionalArguments(command,
replaceSnapshotrefSpec, childNodeNum,
+ alterTableType);
+ }
+ return replaceSnapshotrefSpec;
+ }
+
+ @NotNull
+ private AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec
getReplaceBranchSpec(ASTNode command,
+ String sourceBranch) throws SemanticException {
+ AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec;
+ int childNodeNum;
+ if (command.getChild(2).getType() == KW_SYSTEM_VERSION) {
+ long targetSnapshot = Long.parseLong(command.getChild(3).getText());
+ replaceSnapshotrefSpec = new
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec(sourceBranch, targetSnapshot);
+ childNodeNum = 4;
+ } else {
+ String targetBranch = command.getChild(2).getText();
+ replaceSnapshotrefSpec = new
AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec(sourceBranch, targetBranch);
+ childNodeNum = 3;
+ }
+
+ for (; childNodeNum < command.getChildCount(); childNodeNum++) {
+ extractOptionalArguments(command, replaceSnapshotrefSpec, childNodeNum,
alterTableType);
+ }
+ return replaceSnapshotrefSpec;
+ }
+
+ public static void extractOptionalArguments(ASTNode command,
+ AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec,
int childNodeNum,
+ AlterTableType alterTableType) throws SemanticException {
+ ASTNode childNode = (ASTNode) command.getChild(childNodeNum);
+ switch (childNode.getToken().getType()) {
+ case HiveParser.TOK_RETAIN:
+ String maxRefAge = childNode.getChild(0).getText();
+ String timeUnitOfBranchRetain = childNode.getChild(1).getText();
+ long maxRefAgeMs =
+
TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.parseLong(maxRefAge));
+ replaceSnapshotrefSpec.setMaxRefAgeMs(maxRefAgeMs);
+ break;
+ case HiveParser.TOK_WITH_SNAPSHOT_RETENTION:
+ int minSnapshotsToKeep =
Integer.parseInt(childNode.getChild(0).getText());
+ replaceSnapshotrefSpec.setMinSnapshotsToKeep(minSnapshotsToKeep);
+ if (childNode.getChildren().size() > 1) {
+ String maxSnapshotAge = childNode.getChild(1).getText();
+ String timeUnitOfSnapshotsRetention = childNode.getChild(2).getText();
+ long maxSnapshotAgeMs =
TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH))
+ .toMillis(Long.parseLong(maxSnapshotAge));
+ replaceSnapshotrefSpec.setMaxSnapshotAgeMs(maxSnapshotAgeMs);
+ }
+ break;
+ default:
+ throw new SemanticException("Unrecognized token in ALTER " +
alterTableType.getName() + " statement");
+ }
+ }
+}
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/branch/replace/AlterTableReplaceBranchRefAnalyzer.java
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/branch/replace/AlterTableReplaceBranchRefAnalyzer.java
deleted file mode 100644
index 06d8aa470ce..00000000000
---
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/snapshotref/branch/replace/AlterTableReplaceBranchRefAnalyzer.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.ddl.table.snapshotref.branch.replace;
-
-import java.util.Locale;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.hadoop.hive.common.TableName;
-import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
-import org.apache.hadoop.hive.ql.ddl.DDLUtils;
-import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
-import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
-import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
-import org.apache.hadoop.hive.ql.ddl.table.snapshotref.AlterTableSnapshotRefDesc;
-import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser_AlterClauseParser.KW_SYSTEM_VERSION;
-
-@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_ALTERTABLE_REPLACE_BRANCH)
-public class AlterTableReplaceBranchRefAnalyzer extends AbstractAlterTableAnalyzer {
-
- protected AlterTableType alterTableType;
-
-  public AlterTableReplaceBranchRefAnalyzer(QueryState queryState) throws SemanticException {
-    super(queryState);
-    alterTableType = AlterTableType.REPLACE_BRANCH;
-  }
-
- @Override
-  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
-      throws SemanticException {
- Table table = getTable(tableName);
- DDLUtils.validateTableIsIceberg(table);
- inputs.add(new ReadEntity(table));
- validateAlterTableType(table, alterTableType, false);
- AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec replaceSnapshotrefSpec;
- String sourceBranch = command.getChild(0).getText();
- int childNodeNum;
-    if (command.getChild(1).getType() == KW_SYSTEM_VERSION) {
-      long targetSnapshot = Long.parseLong(command.getChild(2).getText());
-      replaceSnapshotrefSpec = new AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec(sourceBranch, targetSnapshot);
-      childNodeNum = 3;
-    } else {
-      String targetBranch = command.getChild(1).getText();
-      replaceSnapshotrefSpec = new AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec(sourceBranch, targetBranch);
-      childNodeNum = 2;
-    }
-
-    for (; childNodeNum < command.getChildCount(); childNodeNum++) {
-      ASTNode childNode = (ASTNode) command.getChild(childNodeNum);
-      switch (childNode.getToken().getType()) {
-      case HiveParser.TOK_RETAIN:
-        String maxRefAge = childNode.getChild(0).getText();
-        String timeUnitOfBranchRetain = childNode.getChild(1).getText();
-        long maxRefAgeMs =
-            TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.parseLong(maxRefAge));
-        replaceSnapshotrefSpec.setMaxRefAgeMs(maxRefAgeMs);
-        break;
-      case HiveParser.TOK_WITH_SNAPSHOT_RETENTION:
-        int minSnapshotsToKeep = Integer.parseInt(childNode.getChild(0).getText());
-        replaceSnapshotrefSpec.setMinSnapshotsToKeep(minSnapshotsToKeep);
-        if (childNode.getChildren().size() > 1) {
-          String maxSnapshotAge = childNode.getChild(1).getText();
-          String timeUnitOfSnapshotsRetention = childNode.getChild(2).getText();
-          long maxSnapshotAgeMs = TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH))
-              .toMillis(Long.parseLong(maxSnapshotAge));
-          replaceSnapshotrefSpec.setMaxSnapshotAgeMs(maxSnapshotAgeMs);
-        }
-        break;
-      default:
-        throw new SemanticException("Unrecognized token in ALTER " + alterTableType.getName() + " statement");
-      }
-    }
-
-    AlterTableSnapshotRefSpec<AlterTableSnapshotRefSpec.ReplaceSnapshotrefSpec> alterTableSnapshotRefSpec =
-        new AlterTableSnapshotRefSpec<>(alterTableType, replaceSnapshotrefSpec);
-    AbstractAlterTableDesc alterTableDesc =
-        new AlterTableSnapshotRefDesc(alterTableType, tableName, alterTableSnapshotRefSpec);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableDesc)));
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableSnapshotRefSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableSnapshotRefSpec.java
index dfe4c0d1071..675f9ee9d05 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableSnapshotRefSpec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableSnapshotRefSpec.java
@@ -152,7 +152,7 @@ public class AlterTableSnapshotRefSpec<T> {
public static class ReplaceSnapshotrefSpec {
- private final String sourceBranch;
+ private final String sourceRef;
private String targetBranch = null;
private long targetSnapshot;
@@ -160,9 +160,10 @@ public class AlterTableSnapshotRefSpec<T> {
private long maxRefAgeMs = -1;
private int minSnapshotsToKeep = -1;
private long maxSnapshotAgeMs = -1;
+ private boolean isReplaceBranch;
- public String getSourceBranchName() {
- return sourceBranch;
+ public String getSourceRefName() {
+ return sourceRef;
}
public String getTargetBranchName() {
@@ -177,13 +178,13 @@ public class AlterTableSnapshotRefSpec<T> {
return targetSnapshot;
}
- public ReplaceSnapshotrefSpec(String sourceBranch, String targetBranch) {
- this.sourceBranch = sourceBranch;
+ public ReplaceSnapshotrefSpec(String sourceRef, String targetBranch) {
+ this.sourceRef = sourceRef;
this.targetBranch = targetBranch;
}
- public ReplaceSnapshotrefSpec(String sourceBranch, long targetSnapshot) {
- this.sourceBranch = sourceBranch;
+ public ReplaceSnapshotrefSpec(String sourceRef, long targetSnapshot) {
+ this.sourceRef = sourceRef;
this.targetSnapshot = targetSnapshot;
replaceBySnapshot = true;
}
@@ -218,7 +219,8 @@ public class AlterTableSnapshotRefSpec<T> {
@Override
public String toString() {
       MoreObjects.ToStringHelper stringHelper = MoreObjects.toStringHelper(this);
- stringHelper.add("sourceBranch", sourceBranch);
+ stringHelper.add("sourceRef", sourceRef);
+ stringHelper.add("replace", isReplaceBranch ? "Branch" : "Tag");
if (replaceBySnapshot) {
stringHelper.add("targetSnapshot", targetSnapshot);
} else {
@@ -235,5 +237,13 @@ public class AlterTableSnapshotRefSpec<T> {
}
return stringHelper.toString();
}
+
+ public void setIsReplaceBranch() {
+ this.isReplaceBranch = true;
+ }
+
+ public boolean isReplaceBranch() {
+ return isReplaceBranch;
+ }
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index edfa71da1c8..9c982a36550 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -81,7 +81,7 @@ public enum HiveOperation {
   ALTERTABLE_CREATETAG("ALTERTABLE_CREATETAG", HiveParser.TOK_ALTERTABLE_CREATE_TAG, null, null),
   ALTERTABLE_DROPBRANCH("ALTERTABLE_DROPBRANCH", HiveParser.TOK_ALTERTABLE_DROP_BRANCH, null, null),
   ALTERTABLE_RENAMEBRANCH("ALTERTABLE_RENAMEBRANCH", HiveParser.TOK_ALTERTABLE_RENAME_BRANCH, null, null),
-  ALTERTABLE_REPLACEBRANCH("ALTERTABLE_REPLACEBRANCH", HiveParser.TOK_ALTERTABLE_REPLACE_BRANCH, null, null),
+  ALTERTABLE_REPLACESNAPSHOTREF("ALTERTABLE_REPLACESNAPSHOTREF", HiveParser.TOK_ALTERTABLE_REPLACE_SNAPSHOTREF, null, null),
   ALTERTABLE_DROPTAG("ALTERTABLE_DROPTAG", HiveParser.TOK_ALTERTABLE_DROP_TAG, null, null),
   ALTERTABLE_CONVERT("ALTERTABLE_CONVERT", HiveParser.TOK_ALTERTABLE_CONVERT, null, null),
ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER",
HiveParser.TOK_ALTERTABLE_SERIALIZER,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index bc1b40583da..2578c570787 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -142,7 +142,7 @@ public enum HiveOperationType {
ALTERTABLE_CREATEBRANCH,
ALTERTABLE_DROPBRANCH,
ALTERTABLE_RENAMEBRANCH,
- ALTERTABLE_REPLACEBRANCH,
+ ALTERTABLE_REPLACESNAPSHOTREF,
ALTERTABLE_CREATETAG,
ALTERTABLE_DROPTAG,
SHOW_COMPACTIONS,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 191e3a3f93f..483be4f73d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -248,7 +248,7 @@ public class Operation2Privilege {
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEBRANCH,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
- op2Priv.put(HiveOperationType.ALTERTABLE_REPLACEBRANCH,
+ op2Priv.put(HiveOperationType.ALTERTABLE_REPLACESNAPSHOTREF,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
op2Priv.put(HiveOperationType.ALTERTABLE_DROPTAG,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));