jcamachor commented on a change in pull request #1178:
URL: https://github.com/apache/hive/pull/1178#discussion_r445212697



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/PreparePlanUtils.java
##########
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.*;
+import java.util.*;
+
+import org.apache.hadoop.hive.common.type.*;
+import org.apache.hadoop.hive.conf.*;
+import org.apache.hadoop.hive.ql.exec.*;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.exec.tez.*;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.hooks.HookUtils;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.optimizer.physical.*;
+import org.apache.hadoop.hive.ql.parse.*;
+import org.apache.hadoop.hive.ql.parse.type.*;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
+import org.apache.hadoop.hive.ql.security.authorization.command.CommandAuthorizer;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.udf.generic.*;
+import org.apache.hadoop.hive.serde.*;
+import org.apache.hadoop.hive.serde2.typeinfo.*;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * The compiler compiles the command, by creating a QueryPlan from a String command.
+ * Also opens a transaction if necessary.
+ */
+public class PreparePlanUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(PreparePlanUtils.class);
+
+  private static Set<Operator<?>> getAllFetchOperators(FetchTask task) {
+    if (task.getWork().getSource() == null)  {
+      return Collections.EMPTY_SET;
+    }
+    Set<Operator<?>> operatorList =  new HashSet<>();
+    operatorList.add(task.getWork().getSource());
+    return AnnotateRunTimeStatsOptimizer.getAllOperatorsForSimpleFetch(operatorList);
+  }
+
+  /*
+   * Retrieve name for PREPARE/EXECUTE statement
+   * Make sure the tree is either EXECUTE or PREPARE
+   */
+  protected static String getPrepareStatementName(ASTNode tree) {
+    if (tree.getType() == HiveParser.TOK_EXPLAIN) {
+      tree = (ASTNode)tree.getChildren().get(0);
+    }
+    assert (tree.getType() == HiveParser.TOK_PREPARE
+        || tree.getType() == HiveParser.TOK_EXECUTE);
+    return ((ASTNode)tree.getChildren().get(1)).getText();
+  }
+
+  public static void bindDynamicParams(QueryPlan plan, Map<Integer, String> parameterMap) {
+    if (parameterMap == null ||  parameterMap.isEmpty()) {
+      //TODO: LOG
+       return;
+    }
+    Set<Operator<?>> operators = new HashSet<>();
+    if (plan.getFetchTask() != null) {
+      operators.addAll(getAllFetchOperators(plan.getFetchTask()));
+    }
+    List<Task<?>> allTasks = plan.getRootTasks();
+    List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
+    for(Task task:allTasks) {
+      List<BaseWork> baseWorks = new ArrayList<>();
+      if (task instanceof ExplainTask) {
+        ExplainTask explainTask = (ExplainTask) task;
+        for (Task explainRootTask : explainTask.getWork().getRootTasks()) {
+          if (explainRootTask instanceof TezTask) {
+            TezTask explainTezTask = (TezTask) explainRootTask;
+            baseWorks.addAll(explainTezTask.getWork().getAllWork());
+          }
+        }
+      } else if (task instanceof TezTask) {
+        baseWorks = ((TezTask) task).getWork().getAllWork();
+      }
+      for (BaseWork baseWork : baseWorks) {
+        operators.addAll(baseWork.getAllOperators());
+      }
+    }
+
+    for (Operator<?> op : operators) {

Review comment:
       The filter expression associated with `TableScan` may contain parameters to replace too?
   
   I assume we ignore Join or Aggregate because, in that case, Calcite will push the parameters below those operators or remove them during the optimization phase?
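   
   If the pushed-down filter does need rebinding, a minimal sketch of a helper for it could look like this (assuming `TableScanDesc#getFilterExpr`/`setFilterExpr` carry the pushed-down predicate, and reusing `replaceDynamicParamsWithConstant` from this class):
   
   ```java
   import java.util.Map;
   
   import org.apache.hadoop.hive.ql.exec.TableScanOperator;
   import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
   import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
   
   // Sketch only: rebinds dynamic parameters inside the predicate that
   // predicate pushdown attached to a TableScan. The cast assumes the root of
   // the pushed-down predicate is a function call, not a bare parameter.
   private static void bindParamsInTableScanFilter(TableScanOperator tsOp,
       Map<Integer, String> parameterMap) {
     ExprNodeGenericFuncDesc filterExpr = tsOp.getConf().getFilterExpr();
     if (filterExpr != null) {
       tsOp.getConf().setFilterExpr((ExprNodeGenericFuncDesc)
           replaceDynamicParamsWithConstant(filterExpr,
               TypeInfoFactory.booleanTypeInfo, parameterMap));
     }
   }
   ```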

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprFactory.java
##########
@@ -112,40 +112,45 @@ protected boolean isAllConstants(List<T> exprs) {
    */
   protected abstract T createNullConstantExpr();
 
+  /**
+   * Creates a dynamic parameter expression with void type.
+   */
+  protected abstract T createDynamicParamExpr(int index);
+
   /**
    * Creates a boolean constant expression from input value.
    */
-  protected abstract T createBooleanConstantExpr(String value);
+  public abstract T createBooleanConstantExpr(String value);

Review comment:
       The visibility of these methods may not need to change if you can rely 
on the general `createConstantExpr`.

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/PreparePlanUtils.java
##########
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.*;
+import java.util.*;
+
+import org.apache.hadoop.hive.common.type.*;
+import org.apache.hadoop.hive.conf.*;
+import org.apache.hadoop.hive.ql.exec.*;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.exec.tez.*;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.hooks.HookUtils;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.optimizer.physical.*;
+import org.apache.hadoop.hive.ql.parse.*;
+import org.apache.hadoop.hive.ql.parse.type.*;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
+import org.apache.hadoop.hive.ql.security.authorization.command.CommandAuthorizer;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.udf.generic.*;
+import org.apache.hadoop.hive.serde.*;
+import org.apache.hadoop.hive.serde2.typeinfo.*;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * The compiler compiles the command, by creating a QueryPlan from a String command.
+ * Also opens a transaction if necessary.
+ */
+public class PreparePlanUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(PreparePlanUtils.class);
+
+  private static Set<Operator<?>> getAllFetchOperators(FetchTask task) {
+    if (task.getWork().getSource() == null)  {
+      return Collections.EMPTY_SET;
+    }
+    Set<Operator<?>> operatorList =  new HashSet<>();
+    operatorList.add(task.getWork().getSource());
+    return AnnotateRunTimeStatsOptimizer.getAllOperatorsForSimpleFetch(operatorList);
+  }
+
+  /*
+   * Retrieve name for PREPARE/EXECUTE statement
+   * Make sure the tree is either EXECUTE or PREPARE
+   */
+  protected static String getPrepareStatementName(ASTNode tree) {
+    if (tree.getType() == HiveParser.TOK_EXPLAIN) {
+      tree = (ASTNode)tree.getChildren().get(0);
+    }
+    assert (tree.getType() == HiveParser.TOK_PREPARE
+        || tree.getType() == HiveParser.TOK_EXECUTE);
+    return ((ASTNode)tree.getChildren().get(1)).getText();
+  }
+
+  public static void bindDynamicParams(QueryPlan plan, Map<Integer, String> parameterMap) {
+    if (parameterMap == null ||  parameterMap.isEmpty()) {
+      //TODO: LOG
+       return;
+    }
+    Set<Operator<?>> operators = new HashSet<>();
+    if (plan.getFetchTask() != null) {
+      operators.addAll(getAllFetchOperators(plan.getFetchTask()));
+    }
+    List<Task<?>> allTasks = plan.getRootTasks();
+    List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
+    for(Task task:allTasks) {
+      List<BaseWork> baseWorks = new ArrayList<>();
+      if (task instanceof ExplainTask) {
+        ExplainTask explainTask = (ExplainTask) task;
+        for (Task explainRootTask : explainTask.getWork().getRootTasks()) {
+          if (explainRootTask instanceof TezTask) {
+            TezTask explainTezTask = (TezTask) explainRootTask;
+            baseWorks.addAll(explainTezTask.getWork().getAllWork());
+          }
+        }
+      } else if (task instanceof TezTask) {
+        baseWorks = ((TezTask) task).getWork().getAllWork();
+      }
+      for (BaseWork baseWork : baseWorks) {
+        operators.addAll(baseWork.getAllOperators());
+      }
+    }
+
+    for (Operator<?> op : operators) {
+      switch(op.getType()) {
+      case FILTER:
+        FilterOperator filterOp = (FilterOperator)op;
+        ExprNodeDesc predicate = filterOp.getConf().getPredicate();
+        filterOp.getConf().setPredicate(
+            replaceDynamicParamsWithConstant(predicate, TypeInfoFactory.booleanTypeInfo, parameterMap));
+        break;
+      case SELECT:
+        if (op instanceof VectorSelectOperator) {
+          VectorSelectOperator selectOperator = (VectorSelectOperator) op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        } else {
+          SelectOperator selectOperator = (SelectOperator)op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        }
+        break;
+      default:
+        //exception
+      }
+    }
+  }
+
+  private static List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, String> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  // Note about type inference
+  // Since dynamic parameter lacks type we need to figure out appropriate type to create constant
+  // out of string value. To do this, we choose the type of first child of the parent expression
+  // which isn't dynamic parameter
+  // TODO: cases to consider/cover
+  //  exprs have no children, exprs have all dynamic parameters
+  private static ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr,

Review comment:
       This is a difficult problem indeed. I guess this assumption is fine right now, but it seems it can cause different behavior compared to not using a prepare statement. We should be clear about this and discuss how we will do inference moving forward (we should check the standard to see how it deals with this). In addition, I am wondering whether, for the time being, we should limit dynamic parameters to filter expressions because of this (it seems replacing in a project may have more side effects on the plan).

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Context.java
##########
@@ -94,6 +94,7 @@
   // scratch directory to use for local file system tmp folders
   private final String localScratchDir;
 
+  protected Map<Integer, String> paramMap;

Review comment:
       Add comment?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
##########
@@ -283,6 +283,34 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
 
   }
 
+  /**
+   * Processor for processing NULL expression.
+   */
+  public class DynamicParameterProcessor implements SemanticNodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+      if (ctx.getError() != null) {
+        return null;
+      }
+
+      T desc = processGByExpr(nd, procCtx);
+      if (desc != null) {
+        return desc;
+      }
+
+      ASTNode node = (ASTNode)nd;
+      //String indexStr = ((ASTNode)node.getChildren().get(0)).getText();
+      String indexStr = ((ASTNode)(node)).getText();

Review comment:
       nit. (node) -> node

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
##########
@@ -375,6 +375,15 @@ public static ASTNode literal(RexLiteral literal) {
     return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
   }
 
+  public static ASTNode dynamicParam(RexDynamicParam param) {
+    ASTNode node = (ASTNode)ParseDriver.adaptor.create(HiveParser.TOK_PARAMETER,
+        Integer.toString(param.getIndex()));
+    //ASTNode child = (ASTNode)ParseDriver.adaptor.create(HiveParser.TOK_PARAMETER_IDX,

Review comment:
       nit. Commented code

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/PrepareStatementAnalyzer.java
##########
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.*;
+
+import java.util.*;
+
+/**
+ * Analyzer for table dropping commands.

Review comment:
       Update comment

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
##########
@@ -283,6 +283,34 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
 
   }
 
+  /**
+   * Processor for processing NULL expression.

Review comment:
       Update javadoc.

##########
File path: parser/src/java/org/apache/hadoop/hive/ql/parse/PrepareStatementParser.g
##########
@@ -0,0 +1,76 @@
+/**
+   Licensed to the Apache Software Foundation (ASF) under one or more 
+   contributor license agreements.  See the NOTICE file distributed with 
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with 
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+parser grammar PrepareStatementParser;
+
+options
+{
+output=AST;
+ASTLabelType=ASTNode;
+backtrack=false;
+k=3;
+}
+
+@members {
+  @Override
+  public Object recoverFromMismatchedSet(IntStream input,
+      RecognitionException re, BitSet follow) throws RecognitionException {
+    throw re;
+  }
+  @Override
+  public void displayRecognitionError(String[] tokenNames,
+      RecognitionException e) {
+    gParent.errors.add(new ParseError(gParent, e, tokenNames));
+  }
+}
+
+@rulecatch {
+catch (RecognitionException e) {
+  throw e;
+}
+}
+
+//----------------------- Rules for parsing Prepare statement-----------------------------
+prepareStatement
+@init { gParent.pushMsg("prepare statement ", state); }
+@after { gParent.popMsg(state); }
+    : KW_PREPARE identifier KW_AS queryStatementExpression

Review comment:
       The standard uses `FROM` instead of `AS`; let's use that.

##########
File path: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
##########
@@ -6511,4 +6511,13 @@ public void verifyAndSetAll(Map<String, String> overlay) {
     }
     return ret;
   }
+
+  // sync all configs from given conf
+  public void syncFromConf(HiveConf conf) {

Review comment:
       Could you use the copy constructor here, i.e., `HiveConf(HiveConf other)`?
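   
   For reference, a minimal sketch of the call site with the existing copy constructor (assuming a full copy of the other conf is what `syncFromConf` is after; the holder class is hypothetical):
   
   ```java
   import org.apache.hadoop.hive.conf.HiveConf;
   
   // Sketch only: HiveConf(HiveConf other) clones the properties of the given
   // instance, which may make a hand-rolled sync method redundant.
   public final class ConfCopyExample {
     public static HiveConf copyOf(HiveConf other) {
       return new HiveConf(other);
     }
   }
   ```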

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Compiler.java
##########
@@ -43,13 +42,7 @@
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.*;

Review comment:
       nit. Expand imports (it happens all over the code)

##########
File path: parser/src/java/org/apache/hadoop/hive/ql/parse/PrepareStatementParser.g
##########
@@ -0,0 +1,76 @@
+/**
+   Licensed to the Apache Software Foundation (ASF) under one or more 
+   contributor license agreements.  See the NOTICE file distributed with 
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with 
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+parser grammar PrepareStatementParser;
+
+options
+{
+output=AST;
+ASTLabelType=ASTNode;
+backtrack=false;
+k=3;
+}
+
+@members {
+  @Override
+  public Object recoverFromMismatchedSet(IntStream input,
+      RecognitionException re, BitSet follow) throws RecognitionException {
+    throw re;
+  }
+  @Override
+  public void displayRecognitionError(String[] tokenNames,
+      RecognitionException e) {
+    gParent.errors.add(new ParseError(gParent, e, tokenNames));
+  }
+}
+
+@rulecatch {
+catch (RecognitionException e) {
+  throw e;
+}
+}
+
+//----------------------- Rules for parsing Prepare statement-----------------------------
+prepareStatement
+@init { gParent.pushMsg("prepare statement ", state); }
+@after { gParent.popMsg(state); }
+    : KW_PREPARE identifier KW_AS queryStatementExpression
+    -> ^(TOK_PREPARE queryStatementExpression identifier)
+    ;
+
+executeStatement
+@init { gParent.pushMsg("execute statement ", state); }
+@after { gParent.popMsg(state); }
+    : KW_EXECUTE identifier executeParamList

Review comment:
       The standard uses `USING` before the param list; let's add it.
   
   Also, it seems we should simply add the parameter to the list without its position (number).

##########
File path: parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
##########
@@ -471,6 +472,11 @@ Number
     (Digit)+ ( DOT (Digit)* (Exponent)? | Exponent)?
     ;
 
+ParamLiteral
+    :
+    (DOLLAR) (Digit)+ { setText(getText().substring(1, getText().length())); }

Review comment:
       It seems the standard specifies `?` for dynamic parameters, and, with the exception of Postgres, it is the most widely accepted representation. Can we change it?
   Also, I am not sure whether `$` may clash with variable substitution in Hive (or maybe we only check for `${var}`?).

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
##########
@@ -22,15 +22,17 @@
 
 import com.google.common.collect.LinkedHashMultimap;
 import com.google.common.collect.SetMultimap;
+import org.xbill.DNS.*;

Review comment:
       ?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
##########
@@ -621,6 +621,10 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command
 
     driverState.lock();
     try {
+      if (driverContext.getPlan().getIsPrepareQuery() && !driverContext.getPlan().isExplain()) {

Review comment:
       Is this needed? This method calls compilation and other relevant logic 
related to logging, locking, etc. It seems in general it would make sense to 
run through this logic as usual.

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprDynamicParamDesc.java
##########
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hive.common.StringInternUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * A constant expression.
+ */
+public class ExprDynamicParamDesc extends ExprNodeDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  final protected transient static char[] hexArray = "0123456789ABCDEF".toCharArray();

Review comment:
       Not used?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/PreparePlanUtils.java
##########
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.*;
+import java.util.*;
+
+import org.apache.hadoop.hive.common.type.*;
+import org.apache.hadoop.hive.conf.*;
+import org.apache.hadoop.hive.ql.exec.*;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.exec.tez.*;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.hooks.HookUtils;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.optimizer.physical.*;
+import org.apache.hadoop.hive.ql.parse.*;
+import org.apache.hadoop.hive.ql.parse.type.*;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
+import org.apache.hadoop.hive.ql.security.authorization.command.CommandAuthorizer;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.udf.generic.*;
+import org.apache.hadoop.hive.serde.*;
+import org.apache.hadoop.hive.serde2.typeinfo.*;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * The compiler compiles the command, by creating a QueryPlan from a String command.
+ * Also opens a transaction if necessary.
+ */
+public class PreparePlanUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(PreparePlanUtils.class);
+
+  private static Set<Operator<?>> getAllFetchOperators(FetchTask task) {
+    if (task.getWork().getSource() == null)  {
+      return Collections.EMPTY_SET;
+    }
+    Set<Operator<?>> operatorList =  new HashSet<>();
+    operatorList.add(task.getWork().getSource());
+    return AnnotateRunTimeStatsOptimizer.getAllOperatorsForSimpleFetch(operatorList);
+  }
+
+  /*
+   * Retrieve name for PREPARE/EXECUTE statement
+   * Make sure the tree is either EXECUTE or PREPARE
+   */
+  protected static String getPrepareStatementName(ASTNode tree) {
+    if (tree.getType() == HiveParser.TOK_EXPLAIN) {
+      tree = (ASTNode)tree.getChildren().get(0);
+    }
+    assert (tree.getType() == HiveParser.TOK_PREPARE
+        || tree.getType() == HiveParser.TOK_EXECUTE);
+    return ((ASTNode)tree.getChildren().get(1)).getText();
+  }
+
+  public static void bindDynamicParams(QueryPlan plan, Map<Integer, String> parameterMap) {
+    if (parameterMap == null ||  parameterMap.isEmpty()) {
+      //TODO: LOG
+       return;
+    }
+    Set<Operator<?>> operators = new HashSet<>();
+    if (plan.getFetchTask() != null) {
+      operators.addAll(getAllFetchOperators(plan.getFetchTask()));
+    }
+    List<Task<?>> allTasks = plan.getRootTasks();
+    List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
+    for(Task task:allTasks) {
+      List<BaseWork> baseWorks = new ArrayList<>();
+      if (task instanceof ExplainTask) {
+        ExplainTask explainTask = (ExplainTask) task;
+        for (Task explainRootTask : explainTask.getWork().getRootTasks()) {
+          if (explainRootTask instanceof TezTask) {
+            TezTask explainTezTask = (TezTask) explainRootTask;
+            baseWorks.addAll(explainTezTask.getWork().getAllWork());
+          }
+        }
+      } else if (task instanceof TezTask) {
+        baseWorks = ((TezTask) task).getWork().getAllWork();
+      }
+      for (BaseWork baseWork : baseWorks) {
+        operators.addAll(baseWork.getAllOperators());
+      }
+    }
+
+    for (Operator<?> op : operators) {
+      switch(op.getType()) {
+      case FILTER:
+        FilterOperator filterOp = (FilterOperator)op;
+        ExprNodeDesc predicate = filterOp.getConf().getPredicate();
+        filterOp.getConf().setPredicate(
+            replaceDynamicParamsWithConstant(predicate, TypeInfoFactory.booleanTypeInfo, parameterMap));
+        break;
+      case SELECT:
+        if (op instanceof VectorSelectOperator) {
+          VectorSelectOperator selectOperator = (VectorSelectOperator) op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        } else {
+          SelectOperator selectOperator = (SelectOperator)op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        }
+        break;
+      default:
+        //exception
+      }
+    }
+  }
+
+  private static List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, String> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  // Note about type inference
+  // Since dynamic parameter lacks type we need to figure out appropriate type to create constant
+  // out of string value. To do this, we choose the type of first child of the parent expression
+  // which isn't dynamic parameter
+  // TODO: cases to consider/cover
+  //  exprs have no children, exprs have all dynamic parameters
+  private static ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr,
+      TypeInfo typeInfo, Map<Integer, String> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else {
+        typeInfo = child.getTypeInfo();
+        break;
+      }
+    }
+    //TODO: this could be null in case expr doesn't have any child
+    assert(typeInfo != null);
+
+    List<ExprNodeDesc> exprList = new ArrayList<>();
+    for(ExprNodeDesc child: expr.getChildren()) {
+      if(child instanceof ExprDynamicParamDesc) {
+        child = getConstant((ExprDynamicParamDesc)child, typeInfo, paramMap);
+      } else {
+        child = replaceDynamicParamsWithConstant(child, typeInfo, paramMap);
+      }
+      exprList.add(child);
+    }
+    expr.getChildren().clear();
+    expr.getChildren().addAll(exprList);
+    return expr;
+  }
+
+  private static ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr,
+      TypeInfo typeInfo, Map<Integer, String> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+    String value = parameterMap.get(dynamicExpr.getIndex());
+
+    //TODO: probably should create single instance and reuse it

Review comment:
       Can we use `factory.createConstantExpr(TypeInfo typeInfo, Object constantValue)`?
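   
   If that factory method fits, the per-type dispatch in `getConstant` could collapse to something like this sketch (assuming `createConstantExpr(TypeInfo, Object)` accepts the raw string value and performs the conversion itself, per the signature quoted above):
   
   ```java
   import java.util.Map;
   
   import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
   import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
   import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
   import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
   
   // Sketch only: delegate to the general factory method instead of
   // dispatching on every primitive TypeInfo. The return type is widened to
   // ExprNodeDesc since the factory method is not constant-specific.
   private static ExprNodeDesc getConstant(ExprDynamicParamDesc dynamicExpr,
       TypeInfo typeInfo, Map<Integer, String> parameterMap) {
     String value = parameterMap.get(dynamicExpr.getIndex());
     ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
     return factory.createConstantExpr(typeInfo, value);
   }
   ```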

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/PreparePlanUtils.java
##########
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.*;
+import java.util.*;
+
+import org.apache.hadoop.hive.common.type.*;
+import org.apache.hadoop.hive.conf.*;
+import org.apache.hadoop.hive.ql.exec.*;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.exec.tez.*;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.hooks.HookUtils;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.optimizer.physical.*;
+import org.apache.hadoop.hive.ql.parse.*;
+import org.apache.hadoop.hive.ql.parse.type.*;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
+import org.apache.hadoop.hive.ql.security.authorization.command.CommandAuthorizer;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.udf.generic.*;
+import org.apache.hadoop.hive.serde.*;
+import org.apache.hadoop.hive.serde2.typeinfo.*;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * The compiler compiles the command, by creating a QueryPlan from a String command.
+ * Also opens a transaction if necessary.
+ */
+public class PreparePlanUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(PreparePlanUtils.class);
+
+  private static Set<Operator<?>> getAllFetchOperators(FetchTask task) {
+    if (task.getWork().getSource() == null)  {
+      return Collections.EMPTY_SET;
+    }
+    Set<Operator<?>> operatorList =  new HashSet<>();
+    operatorList.add(task.getWork().getSource());
+    return AnnotateRunTimeStatsOptimizer.getAllOperatorsForSimpleFetch(operatorList);
+  }
+
+  /*
+   * Retrieve name for PREPARE/EXECUTE statement
+   * Make sure the tree is either EXECUTE or PREPARE
+   */
+  protected static String getPrepareStatementName(ASTNode tree) {
+    if (tree.getType() == HiveParser.TOK_EXPLAIN) {
+      tree = (ASTNode)tree.getChildren().get(0);
+    }
+    assert (tree.getType() == HiveParser.TOK_PREPARE
+        || tree.getType() == HiveParser.TOK_EXECUTE);
+    return ((ASTNode)tree.getChildren().get(1)).getText();
+  }
+
+  public static void bindDynamicParams(QueryPlan plan, Map<Integer, String> parameterMap) {
+    if (parameterMap == null ||  parameterMap.isEmpty()) {
+      //TODO: LOG
+       return;
+    }
+    Set<Operator<?>> operators = new HashSet<>();
+    if (plan.getFetchTask() != null) {
+      operators.addAll(getAllFetchOperators(plan.getFetchTask()));
+    }
+    List<Task<?>> allTasks = plan.getRootTasks();
+    List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
+    for(Task task:allTasks) {
+      List<BaseWork> baseWorks = new ArrayList<>();
+      if (task instanceof ExplainTask) {
+        ExplainTask explainTask = (ExplainTask) task;
+        for (Task explainRootTask : explainTask.getWork().getRootTasks()) {
+          if (explainRootTask instanceof TezTask) {
+            TezTask explainTezTask = (TezTask) explainRootTask;
+            baseWorks.addAll(explainTezTask.getWork().getAllWork());
+          }
+        }
+      } else if (task instanceof TezTask) {
+        baseWorks = ((TezTask) task).getWork().getAllWork();
+      }
+      for (BaseWork baseWork : baseWorks) {
+        operators.addAll(baseWork.getAllOperators());
+      }
+    }
+
+    for (Operator<?> op : operators) {
+      switch(op.getType()) {
+      case FILTER:
+        FilterOperator filterOp = (FilterOperator)op;
+        ExprNodeDesc predicate = filterOp.getConf().getPredicate();
+        filterOp.getConf().setPredicate(
+            replaceDynamicParamsWithConstant(predicate, TypeInfoFactory.booleanTypeInfo, parameterMap));
+        break;
+      case SELECT:
+        if (op instanceof VectorSelectOperator) {
+          VectorSelectOperator selectOperator = (VectorSelectOperator) op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        } else {
+          SelectOperator selectOperator = (SelectOperator)op;
+          List<ExprNodeDesc> selectExprList = selectOperator.getConf().getColList();
+          if (selectExprList != null) {
+            selectOperator.getConf().setColList(replaceDynamicParamsInExprList(selectExprList, parameterMap));
+          }
+        }
+        break;
+      default:
+        //exception
+      }
+    }
+  }
+
+  private static List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, String> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  // Note about type inference
+  // Since dynamic parameter lacks type we need to figure out appropriate type to create constant
+  // out of string value. To do this, we choose the type of first child of the parent expression
+  // which isn't dynamic parameter
+  // TODO: cases to consider/cover
+  //  exprs have no children, exprs have all dynamic parameters
+  private static ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr,
+      TypeInfo typeInfo, Map<Integer, String> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else {
+        typeInfo = child.getTypeInfo();
+        break;
+      }
+    }
+    //TODO: this could be null in case expr doesn't have any child
+    assert(typeInfo != null);
+
+    List<ExprNodeDesc> exprList = new ArrayList<>();
+    for(ExprNodeDesc child: expr.getChildren()) {
+      if(child instanceof ExprDynamicParamDesc) {
+        child = getConstant((ExprDynamicParamDesc)child, typeInfo, paramMap);
+      } else {
+        child = replaceDynamicParamsWithConstant(child, typeInfo, paramMap);
+      }
+      exprList.add(child);
+    }
+    expr.getChildren().clear();
+    expr.getChildren().addAll(exprList);
+    return expr;
+  }
+
+  private static ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr,
+      TypeInfo typeInfo, Map<Integer, String> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+    String value = parameterMap.get(dynamicExpr.getIndex());
+
+    //TODO: probably should create single instance and reuse it
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)) {
+      //TODO
+      assert(false);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)) {
+      //TODO
+      assert(false);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      //TODO
+      assert(false);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO
+      assert(false);
+    }
+    // we will let constant expression itself infer the type
+    return new ExprNodeConstantDesc(parameterMap.get(dynamicExpr.getIndex()));
+  }
+
+  public static QueryPlan makeCopy(final QueryPlan queryPlan) {

Review comment:
       `makeCopy` -> `deepCopy`. Can we move the method to `QueryPlan`?
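   
   For illustration, a serialization round-trip is the usual shape of such a deep copy; a minimal generic sketch (assuming the plan graph is `java.io.Serializable`; Hive's Kryo-based serialization utilities would likely be the more idiomatic route internally):
   
   ```java
   import java.io.ByteArrayInputStream;
   import java.io.ByteArrayOutputStream;
   import java.io.IOException;
   import java.io.ObjectInputStream;
   import java.io.ObjectOutputStream;
   
   // Sketch only: deep-copies an object graph by serializing it to a byte
   // array and reading it back, so the copy shares no mutable state with
   // the original.
   public final class DeepCopy {
     @SuppressWarnings("unchecked")
     public static <T> T deepCopy(T original) throws IOException, ClassNotFoundException {
       ByteArrayOutputStream bos = new ByteArrayOutputStream();
       try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
         oos.writeObject(original);
       }
       try (ObjectInputStream ois =
           new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
         return (T) ois.readObject();
       }
     }
   }
   ```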

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Compiler.java
##########
@@ -329,7 +357,34 @@ private void recordValidTxns(HiveTxnManager txnMgr) throws LockException {
     LOG.debug("Encoding valid txns info " + txnStr + " txnid:" + txnMgr.getCurrentTxnId());
   }
 
-  private QueryPlan createPlan(BaseSemanticAnalyzer sem) {
+  protected QueryPlan createPlan(BaseSemanticAnalyzer sem) {
+    if (getStatementType(tree) == StatementType.EXECUTE) {
+      assert (tree != null);
+      String queryName = PreparePlanUtils.getPrepareStatementName(tree);
+
+      SessionState ss = SessionState.get();
+      if (ss != null) {
+        if (ss.getPreparePlans().containsKey(queryName)) {
+          QueryPlan plan = ss.getPreparePlans().get(queryName);
+          QueryPlan planCopy = PreparePlanUtils.makeCopy(plan);
+          bindDynamicParams(planCopy, getParameterMap());
+          // This is no longer prepare plan
+          planCopy.setIsPrepareQuery(false);
+          //TODO: this will create empty schema, need to figure out a way to create proper schema

Review comment:
       Once we have retrieved the plan at this stage, we probably need to change the query id to avoid any issues, i.e., multiple queries with the same query id. We could also update the query text in the plan; I am not sure whether anything else is needed.
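   
   A minimal sketch of that post-copy fix-up (the two setters and the id generator are hypothetical stand-ins for whatever `QueryPlan` actually exposes; `makeCopy`, `bindDynamicParams`, and `setIsPrepareQuery` are from this PR):
   
   ```java
   import java.util.Map;
   
   import org.apache.hadoop.hive.ql.PreparePlanUtils;
   import org.apache.hadoop.hive.ql.QueryPlan;
   
   // Sketch only: the copy must not share the prepared plan's query id, and
   // ideally it carries the EXECUTE statement's own text.
   static QueryPlan rebindForExecute(QueryPlan prepared, String executeText,
       Map<Integer, String> params) {
     QueryPlan copy = PreparePlanUtils.makeCopy(prepared);
     copy.setQueryId(QueryPlan.makeQueryId()); // hypothetical fresh-id helper
     copy.setQueryString(executeText);         // hypothetical setter
     PreparePlanUtils.bindDynamicParams(copy, params);
     copy.setIsPrepareQuery(false);
     return copy;
   }
   ```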

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
##########
@@ -106,10 +98,12 @@
   private org.apache.hadoop.hive.ql.plan.api.Query query;
   private final Map<String, Map<String, Long>> counters =
       new ConcurrentHashMap<String, Map<String, Long>>();
-  private final Set<String> done = Collections.newSetFromMap(new
-      ConcurrentHashMap<String, Boolean>());
-  private final Set<String> started = Collections.newSetFromMap(new
-      ConcurrentHashMap<String, Boolean>());
+  //private final Set<String> done = Collections.newSetFromMap(new

Review comment:
       I did not understand this change?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/PrepareStatementAnalyzer.java
##########
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.*;
+
+import java.util.*;
+
+/**
+ * Analyzer for table dropping commands.
+ */
+@DDLType(types = HiveParser.TOK_PREPARE)
+public class PrepareStatementAnalyzer extends CalcitePlanner {
+  /*public DropTableAnalyzer(QueryState queryState) throws SemanticException {

Review comment:
       nit. Commented code

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import java.util.*;
+
+/**
+ * Analyzer for table dropping commands.

Review comment:
       Update comment

##########
File path: ql/src/test/queries/clientpositive/prepare_plan.q
##########
@@ -0,0 +1,46 @@
+--! qt:dataset:src
+--! qt:dataset:alltypesorc
+
+set hive.explain.user=false;
+--explain select * from src where key > '0' limit 10;
+
+
+-- single param
+explain prepare p1 as select * from src where key > $1 limit 10;
+prepare p1 as select * from src where key > $1 limit 10;
+
+execute p1 (1,'200');
+select * from src where key > '200' limit 10;
+
+-- same query, different param
+execute p1 (1, '0');
+select * from src where key > '0' limit 10;
+
+-- different param type
+execute p1 (1,0);
+select * from src where key > 0 limit 10;
+
+-- multiple parameters
+explain prepare p2 as select min(ctinyint), max(cbigint) from alltypesorc where cint > ($1 + $2 + $3) group by ctinyint;

Review comment:
       Cool! :) 

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import java.util.*;
+
+/**
+ * Analyzer for table dropping commands.
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+  /*public DropTableAnalyzer(QueryState queryState) throws SemanticException {

Review comment:
       nit. Commented code

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Compiler.java
##########
@@ -72,19 +65,52 @@
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   private static final LogHelper CONSOLE = new LogHelper(LOG);
 
-  private final Context context;
-  private final DriverContext driverContext;
+  protected final Context context;
+  protected final DriverContext driverContext;
   private final DriverState driverState;
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private ASTNode tree;
 
+  /*
+   * Compiler needs to know if given statement is PREPARE/EXECUTE or otherwise
+   * to orchestrate the planning.
+   * e.g. if plan should be stored or if parameter binding should be done.
+   */
+  enum StatementType{
+    PREPARE,
+    EXECUTE,
+    REGULAR
+  };
+
   public Compiler(Context context, DriverContext driverContext, DriverState driverState) {
     this.context = context;
     this.driverContext = driverContext;
     this.driverState = driverState;
   }
 
+  /*
+   * Given a string command, determine the type of statement
+   */
+  private StatementType getStatementType(final ASTNode tree) {

Review comment:
       I was hoping this orchestration could be moved to the different analyzers for `PREPARE` and `EXECUTE`, e.g., similar to `ExplainSemanticAnalyzer` or others. It may make the code easier to understand instead of creating different workflows in the compiler depending on the statement type (I think `Compiler` is mostly agnostic to the statement type, and it would probably be good to try to keep it that way).
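   
   A minimal sketch of that analyzer-centric shape, loosely modeled on `ExplainSemanticAnalyzer` (the lookup via `SessionState#getPreparePlans` mirrors what this PR already does in `Compiler.createPlan`; the rest is illustrative, not the PR's code):
   
   ```java
   import org.apache.hadoop.hive.ql.PreparePlanUtils;
   import org.apache.hadoop.hive.ql.QueryPlan;
   import org.apache.hadoop.hive.ql.QueryState;
   import org.apache.hadoop.hive.ql.parse.ASTNode;
   import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
   import org.apache.hadoop.hive.ql.parse.SemanticException;
   import org.apache.hadoop.hive.ql.session.SessionState;
   
   // Sketch only: the EXECUTE analyzer resolves the prepared plan itself, so
   // Compiler needs no statement-type switch.
   public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
   
     public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
       super(queryState);
     }
   
     @Override
     public void analyzeInternal(ASTNode ast) throws SemanticException {
       String name = PreparePlanUtils.getPrepareStatementName(ast);
       QueryPlan prepared = SessionState.get().getPreparePlans().get(name);
       if (prepared == null) {
         throw new SemanticException("No prepared statement named " + name);
       }
       // Copy the plan, bind parameters, and hand the result back to the
       // driver here, instead of special-casing EXECUTE in Compiler.createPlan().
     }
   }
   ```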




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


