[
https://issues.apache.org/jira/browse/HIVE-27234?focusedWorklogId=859547&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-859547
]
ASF GitHub Bot logged work on HIVE-27234:
-----------------------------------------
Author: ASF GitHub Bot
Created on: 28/Apr/23 04:46
Start Date: 28/Apr/23 04:46
Worklog Time Spent: 10m
Work Description: ayushtkn commented on code in PR #4216:
URL: https://github.com/apache/hive/pull/4216#discussion_r1179917689
##########
iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergBranchOperation.java:
##########
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.mr.hive;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import org.apache.iceberg.SnapshotRef;
+import org.apache.iceberg.Table;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestHiveIcebergBranchOperation extends
HiveIcebergStorageHandlerWithEngineBase {
+
+ @Test
Review Comment:
Do you need so many tests? You could just create a table once and then create
multiple branches from it with different parameters?
##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -676,6 +678,32 @@ public void
executeOperation(org.apache.hadoop.hive.ql.metadata.Table hmsTable,
}
}
+ @Override
+ public void createBranchOperation(org.apache.hadoop.hive.ql.metadata.Table
hmsTable,
+ AlterTableCreateBranchSpec createBranchSpec) {
+ TableDesc tableDesc = Utilities.getTableDesc(hmsTable);
+ Table icebergTable = IcebergTableUtil.getTable(conf,
tableDesc.getProperties());
+
+ String branchName = createBranchSpec.getBranchName();
+ ManageSnapshots manageSnapshots = icebergTable.manageSnapshots();
+ Long snapShotId = Optional.ofNullable(createBranchSpec.getSnapshotId())
+ .orElse(icebergTable.currentSnapshot().snapshotId());
Review Comment:
What happens in the case of an empty table — just a created Iceberg table
and nothing else? As far as I remember, in that case currentSnapshot is null.
Please give it a check once.
##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -676,6 +678,32 @@ public void
executeOperation(org.apache.hadoop.hive.ql.metadata.Table hmsTable,
}
}
+ @Override
+ public void createBranchOperation(org.apache.hadoop.hive.ql.metadata.Table
hmsTable,
+ AlterTableCreateBranchSpec createBranchSpec) {
+ TableDesc tableDesc = Utilities.getTableDesc(hmsTable);
+ Table icebergTable = IcebergTableUtil.getTable(conf,
tableDesc.getProperties());
+
+ String branchName = createBranchSpec.getBranchName();
+ ManageSnapshots manageSnapshots = icebergTable.manageSnapshots();
+ Long snapShotId = Optional.ofNullable(createBranchSpec.getSnapshotId())
+ .orElse(icebergTable.currentSnapshot().snapshotId());
+ LOG.info("Creating branch {} on iceberg table {}.{}", branchName,
hmsTable.getDbName(),
+ hmsTable.getTableName());
+ manageSnapshots.createBranch(branchName, snapShotId);
Review Comment:
nit:
snapShotId -> snapshotId
##########
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/branch/create/AlterTableCreateBranchAnalyzer.java:
##########
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.branch.create;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableCreateBranchSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
[email protected](types =
HiveParser.TOK_ALTERTABLE_CREATE_BRANCH)
+public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer
{
+
+ public AlterTableCreateBranchAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ protected void analyzeCommand(TableName tableName, Map<String, String>
partitionSpec, ASTNode command)
+ throws SemanticException {
+ Table table = getTable(tableName);
+ validateAlterTableType(table, AlterTableType.CREATEBRANCH, false);
+ if (!"ICEBERG".equalsIgnoreCase(table.getParameters().get("table_type"))) {
+ throw new SemanticException("Cannot perform ALTER CREATE BRANCH
statement on non-iceberg table.");
+ }
+ inputs.add(new ReadEntity(table));
+
+ String branchName = command.getChild(0).getText();
+ Long snapshotId = null;
+ Long maxRefAgeMs = null;
+ Integer minSnapshotsToKeep = null;
+ Long maxSnapshotAgeMs = null;
+ for (int i = 1; i < command.getChildCount(); i++) {
+ ASTNode childNode = (ASTNode) command.getChild(i);
+ switch (childNode.getToken().getType()) {
+ case HiveParser.TOK_AS_OF_VERSION_BRANCH:
+ snapshotId = Long.valueOf(childNode.getChild(0).getText());
+ break;
+ case HiveParser.TOK_RETAIN:
+ String maxRefAge = childNode.getChild(0).getText();
+ String timeUnitOfBranchRetain = childNode.getChild(1).getText();
+ maxRefAgeMs =
TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxRefAge));
+ break;
+ case HiveParser.TOK_WITH_SNAPSHOT_RETENTION:
+ minSnapshotsToKeep = Integer.valueOf(childNode.getChild(0).getText());
+ if (childNode.getChildren().size() > 1) {
+ String maxSnapshotAge = childNode.getChild(1).getText();
+ String timeUnitOfSnapshotsRetention =
childNode.getChild(2).getText();
+ maxSnapshotAgeMs =
TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxSnapshotAge));
+ }
+ break;
+ default:
+ throw new SemanticException("Unrecognized token in ALTER CREATE BRANCH
statement");
+ }
+ }
+
+ AlterTableCreateBranchSpec spec = new
AlterTableCreateBranchSpec(branchName, snapshotId, maxRefAgeMs,
minSnapshotsToKeep, maxSnapshotAgeMs);
Review Comment:
Line length should be less than 120 characters in Hive.
##########
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/branch/create/AlterTableCreateBranchAnalyzer.java:
##########
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.branch.create;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableCreateBranchSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
[email protected](types =
HiveParser.TOK_ALTERTABLE_CREATE_BRANCH)
+public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer
{
+
+ public AlterTableCreateBranchAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ protected void analyzeCommand(TableName tableName, Map<String, String>
partitionSpec, ASTNode command)
+ throws SemanticException {
+ Table table = getTable(tableName);
+ validateAlterTableType(table, AlterTableType.CREATEBRANCH, false);
+ if (!"ICEBERG".equalsIgnoreCase(table.getParameters().get("table_type"))) {
+ throw new SemanticException("Cannot perform ALTER CREATE BRANCH
statement on non-iceberg table.");
+ }
+ inputs.add(new ReadEntity(table));
+
+ String branchName = command.getChild(0).getText();
+ Long snapshotId = null;
+ Long maxRefAgeMs = null;
+ Integer minSnapshotsToKeep = null;
+ Long maxSnapshotAgeMs = null;
+ for (int i = 1; i < command.getChildCount(); i++) {
+ ASTNode childNode = (ASTNode) command.getChild(i);
+ switch (childNode.getToken().getType()) {
+ case HiveParser.TOK_AS_OF_VERSION_BRANCH:
+ snapshotId = Long.valueOf(childNode.getChild(0).getText());
+ break;
+ case HiveParser.TOK_RETAIN:
+ String maxRefAge = childNode.getChild(0).getText();
+ String timeUnitOfBranchRetain = childNode.getChild(1).getText();
+ maxRefAgeMs =
TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxRefAge));
+ break;
+ case HiveParser.TOK_WITH_SNAPSHOT_RETENTION:
+ minSnapshotsToKeep = Integer.valueOf(childNode.getChild(0).getText());
+ if (childNode.getChildren().size() > 1) {
+ String maxSnapshotAge = childNode.getChild(1).getText();
+ String timeUnitOfSnapshotsRetention =
childNode.getChild(2).getText();
+ maxSnapshotAgeMs =
TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxSnapshotAge));
Review Comment:
Use `Long.parseLong(maxSnapshotAge)`.
##########
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/branch/create/AlterTableCreateBranchAnalyzer.java:
##########
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.branch.create;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableCreateBranchSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
[email protected](types =
HiveParser.TOK_ALTERTABLE_CREATE_BRANCH)
+public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer
{
+
+ public AlterTableCreateBranchAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ protected void analyzeCommand(TableName tableName, Map<String, String>
partitionSpec, ASTNode command)
+ throws SemanticException {
+ Table table = getTable(tableName);
+ validateAlterTableType(table, AlterTableType.CREATEBRANCH, false);
+ if (!"ICEBERG".equalsIgnoreCase(table.getParameters().get("table_type"))) {
Review Comment:
Change to
```
if
(!HiveMetaHook.ICEBERG.equalsIgnoreCase(table.getParameters().get(HiveMetaHook.TABLE_TYPE)))
{
```
##########
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/branch/create/AlterTableCreateBranchAnalyzer.java:
##########
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.branch.create;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableCreateBranchSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
[email protected](types =
HiveParser.TOK_ALTERTABLE_CREATE_BRANCH)
+public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer
{
+
+ public AlterTableCreateBranchAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ protected void analyzeCommand(TableName tableName, Map<String, String>
partitionSpec, ASTNode command)
+ throws SemanticException {
+ Table table = getTable(tableName);
+ validateAlterTableType(table, AlterTableType.CREATEBRANCH, false);
+ if (!"ICEBERG".equalsIgnoreCase(table.getParameters().get("table_type"))) {
+ throw new SemanticException("Cannot perform ALTER CREATE BRANCH
statement on non-iceberg table.");
+ }
+ inputs.add(new ReadEntity(table));
+
+ String branchName = command.getChild(0).getText();
+ Long snapshotId = null;
+ Long maxRefAgeMs = null;
+ Integer minSnapshotsToKeep = null;
+ Long maxSnapshotAgeMs = null;
+ for (int i = 1; i < command.getChildCount(); i++) {
+ ASTNode childNode = (ASTNode) command.getChild(i);
+ switch (childNode.getToken().getType()) {
+ case HiveParser.TOK_AS_OF_VERSION_BRANCH:
+ snapshotId = Long.valueOf(childNode.getChild(0).getText());
+ break;
+ case HiveParser.TOK_RETAIN:
+ String maxRefAge = childNode.getChild(0).getText();
+ String timeUnitOfBranchRetain = childNode.getChild(1).getText();
+ maxRefAgeMs =
TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxRefAge));
Review Comment:
You only need a primitive long here. `Long.valueOf` does a `parseLong` to get a
primitive and then boxes it into a `Long`, and here you would be unboxing it back
to `long`, because `toMillis` takes a primitive `long` only.
Use `Long.parseLong(maxRefAge)`.
Issue Time Tracking
-------------------
Worklog Id: (was: 859547)
Time Spent: 2.5h (was: 2h 20m)
> Iceberg: CREATE BRANCH SQL implementation
> ------------------------------------------
>
> Key: HIVE-27234
> URL: https://issues.apache.org/jira/browse/HIVE-27234
> Project: Hive
> Issue Type: Sub-task
> Components: Iceberg integration
> Reporter: zhangbutao
> Assignee: zhangbutao
> Priority: Major
> Labels: pull-request-available
> Time Spent: 2.5h
> Remaining Estimate: 0h
>
> Maybe we can follow spark sql about branch ddl implementation
> [https://github.com/apache/iceberg/pull/6617]
--
This message was sent by Atlassian Jira
(v8.20.10#820010)