[ 
https://issues.apache.org/jira/browse/HIVE-22747?focusedWorklogId=385408&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-385408
 ]

ASF GitHub Bot logged work on HIVE-22747:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 11/Feb/20 19:54
            Start Date: 11/Feb/20 19:54
    Worklog Time Spent: 10m 
      Work Description: miklosgergely commented on pull request #882: 
HIVE-22747 Break up DDLSemanticAnalyzer - extract Table info and lock analyzers
URL: https://github.com/apache/hive/pull/882#discussion_r377863674
 
 

 ##########
 File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableAnalyzer.java
 ##########
 @@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.info.desc;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.info.TableInfoUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Analyzer for DESCRIBE TABLE commands.
+ *
+ * A query like
+ *   "describe formatted default.maptable partition (b=100) id;"
+ * will generate a tree as follows:
+ * TOK_TABTYPE
+ *   TOK_TABNAME --> root for the table name; 2 child nodes mean the DB was specified
+ *     default
+ *     maptable
+ *   TOK_PARTSPEC --> root node for the partition spec; otherwise this child is the column name
+ *     TOK_PARTVAL
+ *       b
+ *       100
+ *   id           --> root node for the column name
+ * formatted
+ */
+@DDLType(type=HiveParser.TOK_DESCTABLE)
+public class DescTableAnalyzer extends BaseSemanticAnalyzer {
+  public DescTableAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    ASTNode tableTypeExpr = (ASTNode) root.getChild(0);
+
+    TableName tableName = getTableName(tableTypeExpr);
+    Table table = getTable(tableName);
+
+    // process the second child node, if it exists, to get the partition spec(s)
+    Map<String, String> partitionSpec = getPartitionSpec(db, tableTypeExpr, tableName);
+    TableInfoUtils.validateTable(db, table, partitionSpec);
+
+    // process the third child node, if it exists, to get the column path
+    String columnPath = getColumnPath(db, tableTypeExpr, tableName, partitionSpec);
+
+    boolean showColStats = false;
+    boolean isFormatted = false;
+    boolean isExt = false;
+    if (root.getChildCount() == 2) {
+      int descOptions = root.getChild(1).getType();
+      isFormatted = descOptions == HiveParser.KW_FORMATTED;
+      isExt = descOptions == HiveParser.KW_EXTENDED;
+      // in case of a "DESCRIBE FORMATTED tablename column_name" statement, columnPath will contain tablename.column_name.
+      // If column_name is not specified, columnPath will be equal to tableName.
+      // This is how we can differentiate whether we are describing a table or a column.
+      if (columnPath != null && isFormatted) {
+        showColStats = true;
+      }
+    }
+
+    inputs.add(new ReadEntity(table));
+
+    DescTableDesc desc = new DescTableDesc(ctx.getResFile(), tableName, partitionSpec, columnPath, isExt, isFormatted);
+    Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    String schema = showColStats ? DescTableDesc.COLUMN_STATISTICS_SCHEMA : DescTableDesc.SCHEMA;
+    setFetchTask(createFetchTask(schema));
+  }
+
+  /** Process the first node to extract the table name; it is either TABLENAME or DBNAME.TABLENAME if a db is given. */
+  private TableName getTableName(ASTNode tableTypeExpr) throws SemanticException {
 
 Review comment:
   Fixed.
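For context on the AST shape documented in the class javadoc above, here is a minimal sketch (not part of the patch; the helper class and its method names are made up for illustration) of how the TOK_TABTYPE subtree is inspected with the same ASTNode accessors DescTableAnalyzer uses:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

// Hypothetical helper, not in the patch: reads the DESCRIBE AST the same way
// DescTableAnalyzer.analyzeInternal does above.
final class DescribeAstSketch {

  /** True if the optional second child of the root carries the FORMATTED keyword. */
  static boolean isFormatted(ASTNode root) {
    return root.getChildCount() == 2
        && root.getChild(1).getType() == HiveParser.KW_FORMATTED;
  }

  /** True if TOK_TABNAME has two children, i.e. the table name was db-qualified ("default.maptable"). */
  static boolean isDbQualified(ASTNode root) {
    ASTNode tableTypeExpr = (ASTNode) root.getChild(0);          // TOK_TABTYPE
    ASTNode tableNameNode = (ASTNode) tableTypeExpr.getChild(0); // TOK_TABNAME
    return tableNameNode.getChildCount() == 2;                   // "default" + "maptable"
  }
}

With this shape in mind, "DESCRIBE FORMATTED t c" and "DESCRIBE FORMATTED t" differ only in whether the column path resolves to something beyond the table name, which is what drives the showColStats flag in the analyzer.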
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 385408)
    Time Spent: 2h  (was: 1h 50m)

> Break up DDLSemanticAnalyzer - extract Table info and lock analyzers
> --------------------------------------------------------------------
>
>                 Key: HIVE-22747
>                 URL: https://issues.apache.org/jira/browse/HIVE-22747
>             Project: Hive
>          Issue Type: Sub-task
>            Reporter: Miklos Gergely
>            Assignee: Miklos Gergely
>            Priority: Major
>              Labels: pull-request-available, refactor-ddl
>         Attachments: HIVE-22747.01.patch, HIVE-22747.02.patch, 
> HIVE-22747.03.patch
>
>          Time Spent: 2h
>  Remaining Estimate: 0h
>
> DDLSemanticAnalyzer is a huge class, more than 4000 lines long. The goal is 
> to refactor it so that everything is cut into smaller, more manageable classes 
> under the package org.apache.hadoop.hive.ql.exec.ddl:
>  * have a separate class for each analyzer
>  * have a package for each operation, containing an analyzer, a description, 
> and an operation, so the number of classes under a package stays manageable 
> (see the sketch below)
> Step #13: extract the table info and lock-related analyzers from 
> DDLSemanticAnalyzer, and move them under the new package.
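A minimal sketch of one such per-command analyzer under the new layout (the lock analyzer class, its package name, and the token constant below are assumptions for illustration, not taken from this patch); the registration mirrors the @DDLType annotation on DescTableAnalyzer in the diff above:

package org.apache.hadoop.hive.ql.ddl.table.lock; // hypothetical package for this sketch

import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/** Sketch of one small analyzer extracted from DDLSemanticAnalyzer. */
@DDLType(type=HiveParser.TOK_LOCKTABLE)
public class LockTableAnalyzer extends BaseSemanticAnalyzer {
  public LockTableAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    // read the table name and lock mode from the AST, build the matching *Desc,
    // wrap it in a DDLWork, and register a root task, just as DescTableAnalyzer does
  }
}

Each such package then holds the analyzer, its *Desc data holder, and the operation that executes it, so a single DDL command can be reviewed in isolation.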



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
