Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Tue Oct 14 19:06:45 2014 @@ -479,8 +479,9 @@ import java.util.HashMap; xlateMap.put("KW_SUBQUERY", "SUBQUERY"); xlateMap.put("KW_REWRITE", "REWRITE"); xlateMap.put("KW_UPDATE", "UPDATE"); - xlateMap.put("KW_VALUES", "VALUES"); + xlateMap.put("KW_PURGE", "PURGE"); + // Operators xlateMap.put("DOT", "."); @@ -929,7 +930,7 @@ dropIndexStatement dropTableStatement @init { pushMsg("drop statement", state); } @after { popMsg(state); } - : KW_DROP KW_TABLE ifExists? tableName -> ^(TOK_DROPTABLE tableName ifExists?) + : KW_DROP KW_TABLE ifExists? tableName KW_PURGE? -> ^(TOK_DROPTABLE tableName ifExists? KW_PURGE?) ; alterStatement @@ -945,8 +946,6 @@ alterTableStatementSuffix @init { pushMsg("alter table statement", state); } @after { popMsg(state); } : alterStatementSuffixRename[true] - | alterStatementSuffixAddCol - | alterStatementSuffixRenameCol | alterStatementSuffixUpdateStatsCol | alterStatementSuffixDropPartitions[true] | alterStatementSuffixAddPartitions[true] @@ -974,6 +973,8 @@ alterTblPartitionStatementSuffix | alterStatementSuffixClusterbySortby | alterStatementSuffixCompact | alterStatementSuffixUpdateStatsCol + | alterStatementSuffixRenameCol + | alterStatementSuffixAddCol ; alterStatementPartitionKeyType @@ -1333,7 +1334,7 @@ showStatement | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)? -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? showStmtIdentifier?) 
| KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tableName ((KW_FROM|KW_IN) db_name=identifier)? -> ^(TOK_SHOWCOLUMNS tableName $db_name?) - | KW_SHOW KW_FUNCTIONS showFunctionIdentifier? -> ^(TOK_SHOWFUNCTIONS showFunctionIdentifier?) + | KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier|showFunctionIdentifier)? -> ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?) | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec?) | KW_SHOW KW_CREATE KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName) | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec? @@ -1530,8 +1531,8 @@ principalSpecification principalName @init {pushMsg("user|group|role name", state);} @after {popMsg(state);} - : KW_USER identifier -> ^(TOK_USER identifier) - | KW_GROUP identifier -> ^(TOK_GROUP identifier) + : KW_USER principalIdentifier -> ^(TOK_USER principalIdentifier) + | KW_GROUP principalIdentifier -> ^(TOK_GROUP principalIdentifier) | KW_ROLE identifier -> ^(TOK_ROLE identifier) ; @@ -2237,7 +2238,7 @@ deleteStatement /*SET <columName> = (3 + col2)*/ columnAssignmentClause : - tableOrColumn EQUAL^ atomExpression + tableOrColumn EQUAL^ precedencePlusExpression ; /*SET col1 = 5, col2 = (4 + col4), ...*/
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java Tue Oct 14 19:06:45 2014 @@ -57,4 +57,12 @@ public interface HiveSemanticAnalyzerHoo public String getUserName(); public void setUserName(String userName); + + public String getIpAddress(); + + public void setIpAddress(String ipAddress); + + public String getCommand(); + + public void setCommand(String command); } Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java Tue Oct 14 19:06:45 2014 @@ -33,6 +33,8 @@ public class HiveSemanticAnalyzerHookCon Set<ReadEntity> inputs = null; Set<WriteEntity> outputs = null; private String userName; + private String ipAddress; + private String command; @Override public Hive getHive() throws HiveException { @@ -73,4 +75,24 @@ public class HiveSemanticAnalyzerHookCon public void setUserName(String userName) { this.userName = userName; } + + @Override + public String getIpAddress() { + return ipAddress; + } + + 
@Override + public void setIpAddress(String ipAddress) { + this.ipAddress = ipAddress; + } + + @Override + public String getCommand() { + return command; + } + + @Override + public void setCommand(String command) { + this.command = command; + } } Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Tue Oct 14 19:06:45 2014 @@ -536,6 +536,13 @@ functionIdentifier identifier ; +principalIdentifier +@init { gParent.pushMsg("identifier for principal spec", state); } +@after { gParent.popMsg(state); } + : identifier + | QuotedIdentifier + ; + nonReserved : KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | 
KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION | KW_VALUES Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=1631841&r1=1631840&r2=1631841&view=diff 
============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Tue Oct 14 19:06:45 2014 @@ -131,7 +131,7 @@ public class ParseDriver { * so that the graph walking algorithms and the rules framework defined in * ql.lib can be used with the AST Nodes. */ - static final TreeAdaptor adaptor = new CommonTreeAdaptor() { + public static final TreeAdaptor adaptor = new CommonTreeAdaptor() { /** * Creates an ASTNode for the given token. The ASTNode is a wrapper around * antlr's CommonTree class that implements the Node interface. Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Tue Oct 14 19:06:45 2014 @@ -111,7 +111,7 @@ public final class ParseUtils { * @param tableFieldTypeInfo TypeInfo to convert to * @return Expression converting column to the type specified by tableFieldTypeInfo */ - static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo) + public static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo) throws SemanticException { // Get base type, since type string may be parameterized String baseType = TypeInfoUtils.getBaseName(tableFieldTypeInfo.getTypeName()); Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java URL: 
http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Tue Oct 14 19:06:45 2014 @@ -104,10 +104,18 @@ public class QBMetaData { return nameToDestTable.get(alias.toLowerCase()); } + public Map<String, Table> getNameToDestTable() { + return nameToDestTable; + } + public Partition getDestPartitionForAlias(String alias) { return nameToDestPartition.get(alias.toLowerCase()); } + public Map<String, Partition> getNameToDestPartition() { + return nameToDestPartition; + } + public String getDestFileForAlias(String alias) { return nameToDestFile.get(alias.toLowerCase()); } Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1631841&r1=1631840&r2=1631841&view=diff ============================================================================== --- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original) +++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Oct 14 19:06:45 2014 @@ -49,7 +49,7 @@ public class RowResolver implements Seri * The primary(first) mapping is still only held in * invRslvMap. */ - private Map<String, String[]> altInvRslvMap; + private final Map<String, String[]> altInvRslvMap; private Map<String, ASTNode> expressionMap; // TODO: Refactor this and do in a more object oriented manner @@ -351,4 +351,73 @@ public class RowResolver implements Seri this.expressionMap = expressionMap; } + + // TODO: 1) How to handle collisions? 2) Should we be cloning ColumnInfo or + // not? 
+ public static int add(RowResolver rrToAddTo, RowResolver rrToAddFrom, + int outputColPos, int numColumns) throws SemanticException { + String tabAlias; + String colAlias; + String[] qualifiedColName; + int i = 0; + + for (ColumnInfo cInfoFrmInput : rrToAddFrom.getRowSchema().getSignature()) { + if ( numColumns >= 0 && i == numColumns ) { + break; + } + ColumnInfo newCI = null; + qualifiedColName = rrToAddFrom.getInvRslvMap().get( + cInfoFrmInput.getInternalName()); + tabAlias = qualifiedColName[0]; + colAlias = qualifiedColName[1]; + + newCI = new ColumnInfo(cInfoFrmInput); + newCI.setInternalName(SemanticAnalyzer + .getColumnInternalName(outputColPos)); + + outputColPos++; + + if (rrToAddTo.get(tabAlias, colAlias) != null) { + LOG.debug("Found duplicate column alias in RR: " + rrToAddTo.get(tabAlias, colAlias)); + } else { + rrToAddTo.put(tabAlias, colAlias, newCI); + } + + qualifiedColName = rrToAddFrom.getAlternateMappings(cInfoFrmInput + .getInternalName()); + if (qualifiedColName != null) { + tabAlias = qualifiedColName[0]; + colAlias = qualifiedColName[1]; + rrToAddTo.put(tabAlias, colAlias, newCI); + } + i++; + } + + return outputColPos; + } + + public static int add(RowResolver rrToAddTo, RowResolver rrToAddFrom, + int outputColPos) throws SemanticException { + return add(rrToAddTo, rrToAddFrom, outputColPos, -1); + } + + /** + * Return a new row resolver that is combination of left RR and right RR. + * The schema will be schema of left, schema of right + * + * @param leftRR + * @param rightRR + * @return + * @throws SemanticException + */ + public static RowResolver getCombinedRR(RowResolver leftRR, + RowResolver rightRR) throws SemanticException { + int outputColPos = 0; + + RowResolver combinedRR = new RowResolver(); + outputColPos = add(combinedRR, leftRR, outputColPos); + outputColPos = add(combinedRR, rightRR, outputColPos); + + return combinedRR; + } }
