Author: rhbutani
Date: Thu Feb  6 23:34:52 2014
New Revision: 1565483

URL: http://svn.apache.org/r1565483
Log:
HIVE-1180 Support Common Table Expressions (CTEs) in Hive (Harish Butani 
reviewed by Gunther H.)

Added:
    hive/trunk/ql/src/test/queries/clientnegative/cte_recursion.q
    hive/trunk/ql/src/test/queries/clientnegative/cte_with_in_subquery.q
    hive/trunk/ql/src/test/queries/clientpositive/cte_1.q
    hive/trunk/ql/src/test/queries/clientpositive/cte_2.q
    hive/trunk/ql/src/test/results/clientnegative/cte_recursion.q.out
    hive/trunk/ql/src/test/results/clientnegative/cte_with_in_subquery.q.out
    hive/trunk/ql/src/test/results/clientpositive/cte_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/cte_2.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
    
hive/trunk/ql/src/test/results/clientnegative/authorization_invalid_priv_v1.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1565483&r1=1565482&r2=1565483&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Thu Feb 
 6 23:34:52 2014
@@ -317,6 +317,7 @@ TOK_SUBQUERY_OP_NOTIN;
 TOK_SUBQUERY_OP_NOTEXISTS;
 TOK_DB_TYPE;
 TOK_TABLE_TYPE;
+TOK_CTE;
 }
 
 
@@ -787,7 +788,7 @@ createTableStatement
          tableFileFormat?
          tableLocation?
          tablePropertiesPrefixed?
-         (KW_AS selectStatement[true])?
+         (KW_AS selectStatementWithCTE)?
       )
     -> ^(TOK_CREATETABLE $name $ext? ifNotExists?
          ^(TOK_LIKETABLE $likeName?)
@@ -800,7 +801,7 @@ createTableStatement
          tableFileFormat?
          tableLocation?
          tablePropertiesPrefixed?
-         selectStatement?
+         selectStatementWithCTE?
         )
     ;
 
@@ -939,8 +940,8 @@ alterViewStatementSuffix
         -> ^(TOK_ALTERVIEW_ADDPARTS alterStatementSuffixAddPartitions)
     | alterStatementSuffixDropPartitions
         -> ^(TOK_ALTERVIEW_DROPPARTS alterStatementSuffixDropPartitions)
-    | name=tableName KW_AS selectStatement[true]
-        -> ^(TOK_ALTERVIEW_AS $name selectStatement)
+    | name=tableName KW_AS selectStatementWithCTE
+        -> ^(TOK_ALTERVIEW_AS $name selectStatementWithCTE)
     ;
 
 alterIndexStatementSuffix
@@ -1511,14 +1512,14 @@ createViewStatement
         (LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition?
         tablePropertiesPrefixed?
         KW_AS
-        selectStatement[true]
+        selectStatementWithCTE
     -> ^(TOK_CREATEVIEW $name orReplace?
          ifNotExists?
          columnNameCommentList?
          tableComment?
          viewPartition?
          tablePropertiesPrefixed?
-         selectStatement
+         selectStatementWithCTE
         )
     ;
 
@@ -1910,10 +1911,36 @@ setOperator
 
 queryStatementExpression[boolean topLevel]
     :
+    /* Would be nice to do this as a gated semantic predicate
+       But the predicate gets pushed as a lookahead decision.
+       Calling rule does not know about topLevel
+    */
+    (w=withClause {topLevel}?)?
+    queryStatementExpressionBody[topLevel] {
+      if ($w.tree != null) {
+      adaptor.addChild($queryStatementExpressionBody.tree, $w.tree);
+      }
+    }
+    ->  queryStatementExpressionBody
+    ;
+
+queryStatementExpressionBody[boolean topLevel]
+    :
     fromStatement[topLevel]
     | regularBody[topLevel]
     ;
-    
+
+withClause
+  :
+  KW_WITH cteStatement (COMMA cteStatement)* -> ^(TOK_CTE cteStatement+)
+;
+
+cteStatement
+   :
+   identifier KW_AS LPAREN queryStatementExpression[false] RPAREN
+   -> ^(TOK_SUBQUERY queryStatementExpression identifier)
+;
+
 fromStatement[boolean topLevel]
 : (singleFromStatement  -> singleFromStatement)
        (u=setOperator r=singleFromStatement
@@ -1987,6 +2014,17 @@ singleSelectStatement
                      distributeByClause? sortByClause? window_clause? 
limitClause?))
    ;
 
+selectStatementWithCTE
+    :
+    (w=withClause)?
+    selectStatement[true] {
+      if ($w.tree != null) {
+      adaptor.addChild($selectStatement.tree, $w.tree);
+      }
+    }
+    ->  selectStatement
+    ;
+
 body
    :
    insertClause

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1565483&r1=1565482&r2=1565483&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Thu Feb  6 
23:34:52 2014
@@ -219,6 +219,10 @@ public class QB {
     aliasToSubq.put(alias, qbexpr);
   }
 
+  public void rewriteCTEToSubq(String alias, String cteName, QBExpr qbexpr) {
+    rewriteViewToSubq(alias, cteName, qbexpr);
+  }
+
   public QBJoinTree getQbJoinTree() {
     return qbjoin;
   }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1565483&r1=1565482&r2=1565483&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
Thu Feb  6 23:34:52 2014
@@ -251,6 +251,15 @@ public class SemanticAnalyzer extends Ba
   //flag for partial scan during analyze ... compute statistics
   protected boolean partialscan = false;
 
+  /*
+   * Capture the CTE definitions in a Query.
+   */
+  private final Map<String, ASTNode> aliasToCTEs;
+  /*
+   * Used to check recursive CTE invocations. Similar to viewsExpanded
+   */
+  private ArrayList<String> ctesExpanded;
+
   private static class Phase1Ctx {
     String dest;
     int nextNum;
@@ -286,6 +295,7 @@ public class SemanticAnalyzer extends Ba
         HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
     queryProperties = new QueryProperties();
     opToPartToSkewedPruner = new HashMap<TableScanOperator, Map<String, 
ExprNodeDesc>>();
+    aliasToCTEs = new HashMap<String, ASTNode>();
   }
 
   @Override
@@ -305,6 +315,7 @@ public class SemanticAnalyzer extends Ba
     opParseCtx.clear();
     groupOpToInputTables.clear();
     prunedPartitions.clear();
+    aliasToCTEs.clear();
   }
 
   public void initParseCtx(ParseContext pctx) {
@@ -667,6 +678,98 @@ public class SemanticAnalyzer extends Ba
     return alias;
   }
 
+  /*
+   * Phase1: hold onto any CTE definitions in aliasToCTEs.
+   * CTE definitions are global to the Query.
+   */
+  private void processCTE(QB qb, ASTNode ctes) throws SemanticException {
+
+    int numCTEs = ctes.getChildCount();
+
+    for(int i=0; i <numCTEs; i++) {
+      ASTNode cte = (ASTNode) ctes.getChild(i);
+      ASTNode cteQry = (ASTNode) cte.getChild(0);
+      String alias = unescapeIdentifier(cte.getChild(1).getText());
+
+      String qName = qb.getId() == null ? "" : qb.getId() + ":";
+      qName += alias.toLowerCase();
+
+      if ( aliasToCTEs.containsKey(qName)) {
+        throw new 
SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(cte.getChild(1)));
+      }
+      aliasToCTEs.put(qName, cteQry);
+    }
+  }
+
+  /*
+   * We allow CTE definitions in views. So we can end up with a hierarchy of 
CTE definitions:
+   * - at the top level of a query statement
+   * - where a view is referenced.
+   * - views may refer to other views.
+   *
+   * The scoping rules we use are: to search for a CTE from the current QB 
outwards. In order to
+   * disambiguate between CTEs at different levels we qualify (prefix) them 
with the id of the QB
+   * they appear in when adding them to the <code>aliasToCTEs</code> map.
+   * 
+   */
+  private ASTNode findCTEFromName(QB qb, String cteName) {
+
+    /*
+     * When saving a view definition all table references in the AST are 
qualified; including CTE references.
+     * Whereas CTE definitions have no DB qualifier; so we strip out the DB 
qualifier before searching in 
+     * <code>aliasToCTEs</code> map.
+     */
+    String currDB = SessionState.get().getCurrentDatabase();
+    if ( currDB != null && cteName.startsWith(currDB) &&
+        cteName.length() > currDB.length() &&
+        cteName.charAt(currDB.length()) == '.'   ) {
+      cteName = cteName.substring(currDB.length() + 1);
+    }
+
+    StringBuffer qId = new StringBuffer();
+    if (qb.getId() != null) {
+      qId.append(qb.getId());
+    }
+
+    while (qId.length() > 0) {
+      String nm = qId + ":" + cteName;
+      if (aliasToCTEs.containsKey(nm)) {
+        return aliasToCTEs.get(nm);
+      }
+      int lastIndex = qId.lastIndexOf(":");
+      lastIndex = lastIndex < 0 ? 0 : lastIndex;
+      qId.setLength(lastIndex);
+    }
+    return aliasToCTEs.get(cteName);
+  }
+  
+  /*
+   * If a CTE is referenced in a QueryBlock:
+   * - add it as a SubQuery for now.
+   *   - SQ.alias is the alias used in QB. (if no alias is specified, 
+   *     it uses the CTE name. Works just like table references)
+   *   - Adding SQ done by:
+   *     - copying AST of CTE
+   *     - setting ASTOrigin on cloned AST.
+   *   - trigger phase 1 on new QBExpr.
+   *   - update QB data structs: remove this as a table reference, move it to 
a SQ invocation. 
+   */
+  private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) throws 
SemanticException {
+    cteAlias = cteAlias == null ? cteName : cteAlias;
+    ASTNode cteQryNode = findCTEFromName(qb, cteName);
+    QBExpr cteQBExpr = new QBExpr(cteAlias);
+
+    String cteText = ctx.getTokenRewriteStream().toString(
+        cteQryNode.getTokenStartIndex(), cteQryNode.getTokenStopIndex());
+    final ASTNodeOrigin cteOrigin = new ASTNodeOrigin("CTE", cteName,
+        cteText, cteAlias, cteQryNode);
+    cteQryNode = (ASTNode) ParseDriver.adaptor.dupTree(cteQryNode);
+    SubQueryUtils.setOriginDeep(cteQryNode, cteOrigin);
+
+    doPhase1QBExpr(cteQryNode, cteQBExpr, qb.getId(), cteAlias);
+    qb.rewriteCTEToSubq(cteAlias, cteName, cteQBExpr);
+  }
+
   private boolean isJoinToken(ASTNode node) {
     if ((node.getToken().getType() == HiveParser.TOK_JOIN)
         || (node.getToken().getType() == HiveParser.TOK_CROSSJOIN)
@@ -1042,6 +1145,9 @@ public class SemanticAnalyzer extends Ba
         assert ast.getChildCount() == 1;
         qb.getParseInfo().getDestToLateralView().put(ctx_1.dest, ast);
         break;
+      case HiveParser.TOK_CTE:
+        processCTE(qb, ast);
+        break;
       default:
         skipRecursion = false;
         break;
@@ -1094,12 +1200,35 @@ public class SemanticAnalyzer extends Ba
       // This is needed for tracking the dependencies for inputs, along with 
their parents.
       Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo =
           new HashMap<String, ObjectPair<String, ReadEntity>>();
+
+      /*
+       * used to capture view to SQ conversions. This is used to check for
+       * recursive CTE invocations.
+       */
+      Map<String, String> sqAliasToCTEName = new HashMap<String, String>();
+
       for (String alias : tabAliases) {
         String tab_name = qb.getTabNameForAlias(alias);
         Table tab = null;
         try {
           tab = db.getTable(tab_name);
         } catch (InvalidTableException ite) {
+          /*
+           * if this is a CTE reference:
+           * Add its AST as a SubQuery to this QB.
+           */
+          ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
+          if ( cteNode != null ) {
+            String cte_name = tab_name.toLowerCase();
+            if (ctesExpanded.contains(cte_name)) {
+              throw new SemanticException("Recursive cte " + tab_name +
+                  " detected (cycle: " + StringUtils.join(ctesExpanded, " -> 
") +
+                  " -> " + tab_name + ").");
+            }
+            addCTEAsSubQuery(qb, cte_name, alias);
+            sqAliasToCTEName.put(alias, cte_name);
+            continue;
+          }
           throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(qb
               .getParseInfo().getSrcForAlias(alias)));
         }
@@ -1192,15 +1321,20 @@ public class SemanticAnalyzer extends Ba
       // Go over the subqueries and getMetaData for these
       for (String alias : qb.getSubqAliases()) {
         boolean wasView = aliasToViewInfo.containsKey(alias);
+        boolean wasCTE = sqAliasToCTEName.containsKey(alias);
         ReadEntity newParentInput = null;
         if (wasView) {
           viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
           newParentInput = aliasToViewInfo.get(alias).getSecond();
+        } else if (wasCTE) {
+          ctesExpanded.add(sqAliasToCTEName.get(alias));
         }
         QBExpr qbexpr = qb.getSubqForAlias(alias);
         getMetaData(qbexpr, newParentInput);
         if (wasView) {
           viewsExpanded.remove(viewsExpanded.size() - 1);
+        } else if (wasCTE) {
+          ctesExpanded.remove(ctesExpanded.size() - 1);
         }
       }
 
@@ -8917,6 +9051,7 @@ public class SemanticAnalyzer extends Ba
     ASTNode child = ast;
     this.ast = ast;
     viewsExpanded = new ArrayList<String>();
+    ctesExpanded = new ArrayList<String>();
 
     LOG.info("Starting Semantic Analysis");
 

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1565483&r1=1565482&r2=1565483&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
 Thu Feb  6 23:34:52 2014
@@ -322,7 +322,7 @@ public class HiveAuthorizationTaskFactor
       Privilege privObj = 
PrivilegeRegistry.getPrivilege(privilegeType.getType());
 
       if (privObj == null) {
-        throw new SemanticException("undefined privilege " + 
privilegeType.getType());
+        throw new SemanticException("undefined privilege " + 
privilegeType.getText());
       }
       List<String> cols = null;
       if (privilegeDef.getChildCount() > 1) {

Added: hive/trunk/ql/src/test/queries/clientnegative/cte_recursion.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/cte_recursion.q?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/cte_recursion.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/cte_recursion.q Thu Feb  6 
23:34:52 2014
@@ -0,0 +1,4 @@
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from q1 where key = '5')
+select * from (select key from q1) a;
\ No newline at end of file

Added: hive/trunk/ql/src/test/queries/clientnegative/cte_with_in_subquery.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/cte_with_in_subquery.q?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/cte_with_in_subquery.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/cte_with_in_subquery.q Thu 
Feb  6 23:34:52 2014
@@ -0,0 +1 @@
+select * from (with q1 as ( select key from q2 where key = '5') select * from 
q1) a;

Added: hive/trunk/ql/src/test/queries/clientpositive/cte_1.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/cte_1.q?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/cte_1.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/cte_1.q Thu Feb  6 23:34:52 
2014
@@ -0,0 +1,28 @@
+explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+;
+
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+;
+
+-- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+-- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a;
\ No newline at end of file

Added: hive/trunk/ql/src/test/queries/clientpositive/cte_2.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/cte_2.q?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/cte_2.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/cte_2.q Thu Feb  6 23:34:52 
2014
@@ -0,0 +1,56 @@
+
+-- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+;
+
+-- insert test
+create table s1 like src;
+with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+;
+select * from s1;
+drop table s1;
+
+-- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+;
+
+-- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+;
+
+select * from s2;
+drop table s2;
+
+-- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+;
+
+select * from v1;
+
+drop view v1;
+
+
+-- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+;
+
+with q1 as ( select key from src where key = '4')
+select * from v1
+;
+
+drop view v1;
+
+

Modified: 
hive/trunk/ql/src/test/results/clientnegative/authorization_invalid_priv_v1.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_invalid_priv_v1.q.out?rev=1565483&r1=1565482&r2=1565483&view=diff
==============================================================================
--- 
hive/trunk/ql/src/test/results/clientnegative/authorization_invalid_priv_v1.q.out
 (original)
+++ 
hive/trunk/ql/src/test/results/clientnegative/authorization_invalid_priv_v1.q.out
 Thu Feb  6 23:34:52 2014
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table if not exists authorization_invalid_v1 (key int, 
value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@authorization_invalid_v1
-FAILED: SemanticException undefined privilege 731
+FAILED: SemanticException undefined privilege TOK_PRIV_DELETE

Added: hive/trunk/ql/src/test/results/clientnegative/cte_recursion.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/cte_recursion.q.out?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/cte_recursion.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/cte_recursion.q.out Thu Feb  
6 23:34:52 2014
@@ -0,0 +1 @@
+FAILED: SemanticException Recursive cte q1 detected (cycle: q1 -> q2 -> q1).

Added: hive/trunk/ql/src/test/results/clientnegative/cte_with_in_subquery.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/cte_with_in_subquery.q.out?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/cte_with_in_subquery.q.out 
(added)
+++ hive/trunk/ql/src/test/results/clientnegative/cte_with_in_subquery.q.out 
Thu Feb  6 23:34:52 2014
@@ -0,0 +1 @@
+FAILED: ParseException line 1:64 Failed to recognize predicate 'select'. 
Failed rule: 'queryStatementExpression' in subquery source

Added: hive/trunk/ql/src/test/results/clientpositive/cte_1.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/cte_1.q.out?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/cte_1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/cte_1.q.out Thu Feb  6 
23:34:52 2014
@@ -0,0 +1,165 @@
+PREHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE 
Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 29 Data size: 2906 Basic stats: 
COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE 
Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 29 Data size: 2906 Basic stats: 
COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE 
Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE 
Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 29 Data size: 2906 Basic stats: 
COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5

Added: hive/trunk/ql/src/test/results/clientpositive/cte_2.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/cte_2.q.out?rev=1565483&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/cte_2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/cte_2.q.out Thu Feb  6 
23:34:52 2014
@@ -0,0 +1,201 @@
+PREHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+4      val_4
+5      val_5
+5      val_5
+5      val_5
+PREHOOK: query: -- insert test
+create table s1 like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- insert test
+create table s1 like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@s1
+PREHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@s1
+POSTHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@s1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: select * from s1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+5      val_5
+5      val_5
+5      val_5
+PREHOOK: query: drop table s1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s1
+PREHOOK: Output: default@s1
+POSTHOOK: query: drop table s1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s1
+POSTHOOK: Output: default@s1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+5      val_5
+5      val_5
+5      val_5
+PREHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@s2
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: select * from s2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+4
+PREHOOK: query: drop table s2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s2
+PREHOOK: Output: default@s2
+POSTHOOK: query: drop table s2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s2
+POSTHOOK: Output: default@s2
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+POSTHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@v1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+POSTHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@v1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+PREHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1
+POSTHOOK: Lineage: s1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ]


Reply via email to