Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Tue Oct 21 11:29:18 2008
@@ -58,6 +58,14 @@
 
     private LinkedHashMap<String, String> spec;
 
+    /**
+     * @return
+     * @see org.apache.hadoop.hive.metastore.api.Partition#getValues()
+     */
+    public List<String> getValues() {
+      return tPartition.getValues();
+    }
+
     private Path partPath;
     private URI partURI;
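The hunk above exposes the Thrift-level partition key values through the QL-layer Partition wrapper. A caller's-eye sketch of the new accessor — the helper class and method below are illustrative, not part of the commit:

    import java.util.List;

    import org.apache.hadoop.hive.ql.metadata.Partition;

    public class PartitionValuesExample {
      // Joins the partition key values returned by the new getValues()
      // delegate, e.g. "2008-10-21/US" for a table partitioned on (ds, country).
      public static String formatValues(Partition p) {
        StringBuilder sb = new StringBuilder();
        for (String v : p.getValues()) {
          if (sb.length() > 0) {
            sb.append('/');
          }
          sb.append(v);
        }
        return sb.toString();
      }
    }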
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Tue Oct 21 11:29:18 2008
@@ -95,7 +95,7 @@
     initEmpty();
     this.schema = schema;
     this.deserializer = deserializer; //TODO: convert to SerDeInfo format
-    this.getTTable().getSd().getSerdeInfo().setSerializationLib(deserializer.getShortName());
+    this.getTTable().getSd().getSerdeInfo().setSerializationLib(deserializer.getClass().getName());
     getTTable().setTableName(name);
     getSerdeInfo().setSerializationLib(deserializer.getClass().getName());
     setInputFormatClass(inputFormatClass);
@@ -108,7 +108,7 @@
     initEmpty();
     getTTable().setTableName(name);
     getTTable().setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
-    getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.shortName());
+    getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.class.getName());
     getSerdeInfo().getParameters().put(Constants.SERIALIZATION_FORMAT, "1");
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Tue Oct 21 11:29:18 2008
@@ -31,7 +31,6 @@
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -43,6 +42,8 @@
   protected final Hive db;
   protected final HiveConf conf;
   protected List<Task<? extends Serializable>> rootTasks;
+  protected Task<? extends Serializable> fetchTask;
+  protected boolean fetchTaskInit;
   protected final Log LOG;
   protected final LogHelper console;
 
@@ -65,13 +66,40 @@
     }
   }
 
-  public abstract void analyze(CommonTree ast, Context ctx) throws SemanticException;
+  public abstract void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException;
+
+  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+    scratchDir = ctx.getScratchDir();
+    analyzeInternal(ast, ctx);
+  }
 
   public List<Task<? extends Serializable>> getRootTasks() {
     return rootTasks;
   }
 
-  protected void reset() {
+  /**
+   * @return the fetchTask
+   */
+  public Task<? extends Serializable> getFetchTask() {
+    return fetchTask;
+  }
+
+  /**
+   * @param fetchTask the fetchTask to set
+   */
+  public void setFetchTask(Task<? extends Serializable> fetchTask) {
+    this.fetchTask = fetchTask;
+  }
+
+  public boolean getFetchTaskInit() {
+    return fetchTaskInit;
+  }
+
+  public void setFetchTaskInit(boolean fetchTaskInit) {
+    this.fetchTaskInit = fetchTaskInit;
+  }
+
+  protected void reset() {
     rootTasks = new ArrayList<Task<? extends Serializable>>();
   }
 
@@ -118,9 +146,33 @@
   public static String unescapeSQLString(String b) {
     assert(b.charAt(0) == '\'');
     assert(b.charAt(b.length()-1) == '\'');
+
+    // Some of the strings can be passed in as unicode. For example, the
+    // delimiter can be passed in as \002 - So, we first check if the
+    // string is a unicode number, else go back to the old behavior
     StringBuilder sb = new StringBuilder(b.length());
-    for(int i=1; i+1<b.length(); i++) {
-      if (b.charAt(i) == '\\' && i+2<b.length()) {
+    int i = 1;
+    while (i < (b.length()-1)) {
+
+      if (b.charAt(i) == '\\' && (i+4 < b.length())) {
+        char i1 = b.charAt(i+1);
+        char i2 = b.charAt(i+2);
+        char i3 = b.charAt(i+3);
+        if ((i1 >= '0' && i1 <= '1') &&
+            (i2 >= '0' && i2 <= '7') &&
+            (i3 >= '0' && i3 <= '7'))
+        {
+          byte bVal = (byte)((i3 - '0') + ((i2 - '0') * 8 ) + ((i1 - '0') * 8 * 8));
+          byte[] bValArr = new byte[1];
+          bValArr[0] = bVal;
+          String tmp = new String(bValArr);
+          sb.append(tmp);
+          i += 4;
+          continue;
+        }
+      }
+
+      if (b.charAt(i) == '\\' && (i+2 < b.length())) {
         char n=b.charAt(i+1);
         switch(n) {
         case '0': sb.append("\0"); break;
@@ -141,6 +193,7 @@
       } else {
         sb.append(b.charAt(i));
       }
+      i++;
     }
     return sb.toString();
   }
@@ -159,7 +212,7 @@
     public HashMap<String, String> partSpec;
     public Partition partHandle;
 
-    public tableSpec(Hive db, CommonTree ast) throws SemanticException {
+    public tableSpec(Hive db, CommonTree ast, boolean forceCreatePartition) throws SemanticException {
       assert(ast.getToken().getType() == HiveParser.TOK_TAB);
       int childIndex = 0;
 
@@ -179,7 +232,10 @@
             String val = stripQuotes(partspec_val.getChild(1).getText());
             partSpec.put(partspec_val.getChild(0).getText(), val);
           }
-          partHandle = Hive.get().getPartition(tableHandle, partSpec, true);
+          partHandle = Hive.get().getPartition(tableHandle, partSpec, forceCreatePartition);
+          if(partHandle == null) {
+            throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(childIndex)));
+          }
         }
       } catch (InvalidTableException ite) {
         throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast.getChild(0)), ite);
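The new branch in unescapeSQLString above decodes backslash-octal escapes such as \002 (a common way to spell field delimiters) into the single byte they name before falling back to the existing single-character escapes. A standalone sketch of just that decoding step — simplified to ignore the surrounding quotes and the other escape forms, with illustrative names:

    public class OctalEscapeExample {
      // Standalone version of the new decoding step: '\' followed by three
      // octal digits (first digit 0 or 1, so the value fits in one byte)
      // becomes that single byte, e.g. "\002" -> the character 0x02.
      public static String unescapeOctal(String s) {
        StringBuilder sb = new StringBuilder(s.length());
        int i = 0;
        while (i < s.length()) {
          if (s.charAt(i) == '\\' && i + 3 < s.length()
              && s.charAt(i + 1) >= '0' && s.charAt(i + 1) <= '1'
              && s.charAt(i + 2) >= '0' && s.charAt(i + 2) <= '7'
              && s.charAt(i + 3) >= '0' && s.charAt(i + 3) <= '7') {
            int val = (s.charAt(i + 1) - '0') * 64
                + (s.charAt(i + 2) - '0') * 8
                + (s.charAt(i + 3) - '0');
            sb.append((char) val);
            i += 4;
          } else {
            sb.append(s.charAt(i));
            i++;
          }
        }
        return sb.toString();
      }
    }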
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue Oct 21 11:29:18 2008
@@ -18,13 +18,22 @@
 package org.apache.hadoop.hive.ql.parse;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.antlr.runtime.tree.CommonTree;
+import org.antlr.runtime.tree.Tree;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
-
-import org.antlr.runtime.tree.CommonTree;
-
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
@@ -32,17 +41,11 @@
 import org.apache.hadoop.hive.ql.plan.createTableDesc;
 import org.apache.hadoop.hive.ql.plan.descTableDesc;
 import org.apache.hadoop.hive.ql.plan.dropTableDesc;
+import org.apache.hadoop.hive.ql.plan.showPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.showTablesDesc;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.alterTableDesc.alterTableTypes;
 import org.apache.hadoop.hive.serde.Constants;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import java.util.*;
-
 public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer");
   public static final Map<Integer, String> TokenToTypeName = new HashMap<Integer, String>();
@@ -67,7 +70,7 @@
   }
 
   @Override
-  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+  public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException {
     this.ctx = ctx;
     if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE)
       analyzeCreateTable(ast, false);
@@ -88,7 +91,16 @@
     else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME)
       analyzeAlterTableRename(ast);
     else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS)
-      analyzeAlterTableAddCols(ast);
+      analyzeAlterTableModifyCols(ast, alterTableTypes.ADDCOLS);
+    else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS)
+      analyzeAlterTableModifyCols(ast, alterTableTypes.REPLACECOLS);
+    else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS)
+      analyzeAlterTableDropParts(ast);
+    else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS)
+    {
+      ctx.setResFile(new Path(getTmpFileName()));
+      analyzeShowPartitions(ast);
+    }
   }
 
   private void analyzeCreateTable(CommonTree ast, boolean isExt)
@@ -107,8 +119,10 @@
     String comment = null;
     boolean isSequenceFile = false;
     String location = null;
+    String serde = null;
+    Map<String, String> mapProp = null;
 
-    LOG.info("Creating table" + tableName);
+    LOG.info("Creating table" + tableName);
     int numCh = ast.getChildCount();
     for (int num = 2; num < numCh; num++) {
@@ -152,6 +166,18 @@
           }
         }
         break;
+      case HiveParser.TOK_TABLESERIALIZER:
+        serde = unescapeSQLString(child.getChild(0).getText());
+        if (child.getChildCount() == 2) {
+          mapProp = new HashMap<String, String>();
+          CommonTree prop = (CommonTree)((CommonTree)child.getChild(1)).getChild(0);
+          for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
+            String key = unescapeSQLString(prop.getChild(propChild).getChild(0).getText());
+            String value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
+            mapProp.put(key,value);
+          }
+        }
+        break;
       case HiveParser.TOK_TBLSEQUENCEFILE:
         isSequenceFile = true;
         break;
@@ -166,7 +192,7 @@
       new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                           sortCols, numBuckets,
                           fieldDelim, collItemDelim, mapKeyDelim, lineDelim,
-                          comment, isSequenceFile, location);
+                          comment, isSequenceFile, location, serde, mapProp);
 
     validateCreateTable(crtTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(crtTblDesc), conf));
 
@@ -310,12 +336,34 @@
 
   private void analyzeDescribeTable(CommonTree ast)
   throws SemanticException {
-    String tableName = ast.getChild(0).getText();
-    descTableDesc descTblDesc = new descTableDesc(ctx.getResFile(), tableName);
+    Tree table_t = ast.getChild(0);
+    String tableName = table_t.getChild(0).getText();
+    HashMap<String, String> partSpec = null;
+    // get partition metadata if partition specified
+    if (table_t.getChildCount() == 2) {
+      CommonTree partspec = (CommonTree) table_t.getChild(1);
+      partSpec = new LinkedHashMap<String, String>();
+      for (int i = 0; i < partspec.getChildCount(); ++i) {
+        CommonTree partspec_val = (CommonTree) partspec.getChild(i);
+        String val = stripQuotes(partspec_val.getChild(1).getText());
+        partSpec.put(partspec_val.getChild(0).getText(), val);
+      }
+    }
+
+    boolean isExt = ast.getChildCount() > 1;
+    descTableDesc descTblDesc = new descTableDesc(ctx.getResFile(), tableName, partSpec, isExt);
     rootTasks.add(TaskFactory.get(new DDLWork(descTblDesc), conf));
     LOG.info("analyzeDescribeTable done");
   }
 
+  private void analyzeShowPartitions(CommonTree ast)
+  throws SemanticException {
+    showPartitionsDesc showPartsDesc;
+    String tableName = ast.getChild(0).getText();
+    showPartsDesc = new showPartitionsDesc(tableName, ctx.getResFile());
+    rootTasks.add(TaskFactory.get(new DDLWork(showPartsDesc), conf));
+  }
+
   private void analyzeShowTables(CommonTree ast)
   throws SemanticException {
     showTablesDesc showTblsDesc;
@@ -335,34 +383,32 @@
     rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableAddCols(CommonTree ast)
+  private void analyzeAlterTableModifyCols(CommonTree ast, alterTableTypes alterType)
   throws SemanticException {
     String tblName = ast.getChild(0).getText();
     List<FieldSchema> newCols = getColumns((CommonTree)ast.getChild(1));
-    Table tbl;
-    try {
-      tbl = db.getTable(tblName);
-    } catch (HiveException e) {
-      throw new SemanticException(e.getMessage());
-    }
-    List<FieldSchema> oldCols = tbl.getCols();
-
-    // make sure the columns does not already exist
-    Iterator<FieldSchema> iterNewCols = newCols.iterator();
-    while (iterNewCols.hasNext()) {
-      FieldSchema newCol = iterNewCols.next();
-      String newColName = newCol.getName();
-      Iterator<FieldSchema> iterOldCols = oldCols.iterator();
-      while (iterOldCols.hasNext()) {
-        String oldColName = iterOldCols.next().getName();
-        if (oldColName.equalsIgnoreCase(newColName))
-          throw new SemanticException(ErrorMsg.DUPLICATE_COLUMN_NAMES.getMsg());
-      }
-      oldCols.add(newCol);
-    }
-
-    alterTableDesc alterTblDesc = new alterTableDesc(tblName, oldCols);
+    alterTableDesc alterTblDesc = new alterTableDesc(tblName, newCols, alterType);
     rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf));
   }
 
+  private void analyzeAlterTableDropParts(CommonTree ast) throws SemanticException {
+    String tblName = null;
+    List<HashMap<String, String>> partSpecs = new ArrayList<HashMap<String, String>>();
+    int childIndex = 0;
+    // get table metadata
+    tblName = ast.getChild(0).getText();
+    // get partition metadata if partition specified
+    for( childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
+      CommonTree partspec = (CommonTree) ast.getChild(childIndex);
+      HashMap<String, String> partSpec = new LinkedHashMap<String, String>();
+      for (int i = 0; i < partspec.getChildCount(); ++i) {
+        CommonTree partspec_val = (CommonTree) partspec.getChild(i);
+        String val = stripQuotes(partspec_val.getChild(1).getText());
+        partSpec.put(partspec_val.getChild(0).getText(), val);
+      }
+      partSpecs.add(partSpec);
+    }
+    dropTableDesc dropTblDesc = new dropTableDesc(tblName, partSpecs);
+    rootTasks.add(TaskFactory.get(new DDLWork(dropTblDesc), conf));
+  }
 }
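The same partition-spec walk now appears three times in this commit (tableSpec, analyzeDescribeTable, analyzeAlterTableDropParts): each child of the spec node is a key/value pair, collected into a LinkedHashMap so key order is preserved. A minimal sketch of that loop, assuming an ANTLR tree shaped the way the Hive.g rules produce it (helper name illustrative):

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.antlr.runtime.tree.Tree;

    public class PartSpecExample {
      // Mirrors the loop used in tableSpec, analyzeDescribeTable and
      // analyzeAlterTableDropParts: child i of the partition-spec node holds
      // a (key, value) pair; LinkedHashMap keeps keys in declaration order.
      public static Map<String, String> readPartSpec(Tree partspec) {
        Map<String, String> spec = new LinkedHashMap<String, String>();
        for (int i = 0; i < partspec.getChildCount(); ++i) {
          Tree pair = partspec.getChild(i);
          String key = pair.getChild(0).getText();
          String val = pair.getChild(1).getText(); // real code strips quotes here
          spec.put(key, val);
        }
        return spec;
      }
    }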
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Tue Oct 21 11:29:18 2008
@@ -28,6 +28,7 @@
   GENERIC_ERROR("Exception while processing"),
   INVALID_TABLE("Table not found"),
   INVALID_COLUMN("Invalid Column Reference"),
+  INVALID_PARTITION("Partition not found"),
   AMBIGOUS_COLUMN("Ambigous Column Reference"),
   AMBIGOUS_TABLE_ALIAS("Ambigous Table Alias"),
   INVALID_TABLE_ALIAS("Invalid Table Alias"),
@@ -47,12 +48,17 @@
   ILLEGAL_PATH("Path is not legal"),
   INVALID_NUMERICAL_CONSTANT("Invalid Numerical Constant"),
   INVALID_ARRAYINDEX_CONSTANT("Non Constant Expressions for Array Indexes not Supported"),
+  INVALID_MAPINDEX_CONSTANT("Non Constant Expression for Map Indexes not Supported"),
+  INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"),
+  NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
   SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
   COLUMN_REPAEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
   DUPLICATE_COLUMN_NAMES("Duplicate column names"),
   COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
   SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
-  SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found");
+  SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"),
+  NO_PARTITION_PREDICATE("No Partition Predicate Found"),
+  INVALID_DOT(". operator is only supported on struct or list of struct types");
 
   private String mesg;
   ErrorMsg(String mesg) {

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Tue Oct 21 11:29:18 2008
@@ -18,12 +18,15 @@
 package org.apache.hadoop.hive.ql.parse;
 
-import java.io.File;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.explainWork;
 
@@ -34,7 +37,7 @@
     super(conf);
   }
 
-  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+  public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException {
     // Create a semantic analyzer for the query
     BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (CommonTree)ast.getChild(0));
 
@@ -46,9 +49,18 @@
     }
 
     ctx.setResFile(new Path(getTmpFileName()));
-
-    rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(),
-                                                  sem.getRootTasks(),
+    List<Task<? extends Serializable>> tasks = sem.getRootTasks();
+    Task<? extends Serializable> fetchTask = sem.getFetchTask();
+    if (tasks == null) {
+      if (fetchTask != null) {
+        tasks = new ArrayList<Task<? extends Serializable>>();
+        tasks.add(fetchTask);
+      }
+    }
+    else if (fetchTask != null)
+      tasks.add(fetchTask);
+
+    rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), tasks,
                                                   ((CommonTree)ast.getChild(0)).toStringTree(),
                                                   extended), this.conf));
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Tue Oct 21 11:29:18 2008
@@ -36,7 +36,7 @@
     super(conf);
   }
 
-  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+  public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     String className = unescapeSQLString(ast.getChild(1).getText());
     createFunctionDesc desc = new createFunctionDesc(functionName, className);

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Tue Oct 21 11:29:18 2008
@@ -79,7 +79,10 @@
 TOK_DESCTABLE;
 TOK_ALTERTABLE_RENAME;
 TOK_ALTERTABLE_ADDCOLS;
+TOK_ALTERTABLE_REPLACECOLS;
+TOK_ALTERTABLE_DROPPARTS;
 TOK_SHOWTABLES;
+TOK_SHOWPARTITIONS;
 TOK_CREATEEXTTABLE;
 TOK_DROPTABLE;
 TOK_TABCOLLIST;
@@ -102,6 +105,11 @@
 TOK_CHARSETLITERAL;
 TOK_CREATEFUNCTION;
 TOK_EXPLAIN;
+TOK_TABLESERIALIZER;
+TOK_TABLSERDEPROPERTIES;
+TOK_TABLESERDEPROPLIST;
+TOK_LIMIT;
+TOKTABLESERDEPROPERTY;
 }
 
@@ -161,6 +169,7 @@
 alterStatement
     : alterStatementRename
     | alterStatementAddCol
+    | alterStatementDropPartitions
    ;
 
 alterStatementRename
@@ -169,16 +178,23 @@
     ;
 
 alterStatementAddCol
-    : KW_ALTER KW_TABLE Identifier KW_ADD KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    -> ^(TOK_ALTERTABLE_ADDCOLS Identifier columnNameTypeList)
+    : KW_ALTER KW_TABLE Identifier (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
+    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS Identifier columnNameTypeList)
+    ->                 ^(TOK_ALTERTABLE_REPLACECOLS Identifier columnNameTypeList)
+    ;
+
+alterStatementDropPartitions
+    : KW_ALTER KW_TABLE Identifier KW_DROP partitionSpec (COMMA partitionSpec)*
+    -> ^(TOK_ALTERTABLE_DROPPARTS Identifier partitionSpec+)
     ;
 
 descStatement
-    : KW_DESCRIBE Identifier -> ^(TOK_DESCTABLE Identifier)
+    : KW_DESCRIBE (isExtended=KW_EXTENDED)? (tab=tabName) -> ^(TOK_DESCTABLE $tab $isExtended?)
     ;
 
 showStatement
     : KW_SHOW KW_TABLES showStmtIdentifier? -> ^(TOK_SHOWTABLES showStmtIdentifier?)
+    | KW_SHOW KW_PARTITIONS Identifier -> ^(TOK_SHOWPARTITIONS Identifier)
     ;
 
 createFunctionStatement
@@ -211,6 +227,23 @@
     :
       KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?
     -> ^(TOK_TABLEROWFORMAT tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?)
+    | KW_ROW KW_FORMAT KW_SERIALIZER name=StringLiteral tableSerializerProperties?
+    -> ^(TOK_TABLESERIALIZER $name tableSerializerProperties?)
     ;
+
+tableSerializerProperties
+    :
+      KW_WITH KW_PROPERTIES LPAREN propertiesList RPAREN -> ^(TOK_TABLSERDEPROPERTIES propertiesList)
+    ;
+
+propertiesList
+    :
+      keyValueProperty (COMMA keyValueProperty)* -> ^(TOK_TABLESERDEPROPLIST keyValueProperty+)
+    ;
+
+keyValueProperty
+    :
+      key=StringLiteral EQUAL value=StringLiteral -> ^(TOKTABLESERDEPROPERTY $key $value)
+    ;
 
 tableRowFormatFieldIdentifier
@@ -328,14 +361,16 @@
      whereClause?
      groupByClause?
      orderByClause?
-     clusterByClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT insertClause selectClause whereClause? groupByClause? orderByClause? clusterByClause?))
+     clusterByClause?
+     limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT insertClause selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?))
   |
     selectClause
      fromClause
      whereClause?
      groupByClause?
      orderByClause?
-     clusterByClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause?))
+     clusterByClause?
+     limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?))
   ;
 
@@ -346,13 +381,15 @@
      whereClause?
      groupByClause?
      orderByClause?
-     clusterByClause? -> ^(TOK_INSERT insertClause? selectClause whereClause? groupByClause? orderByClause? clusterByClause?)
+     clusterByClause?
+     limitClause? -> ^(TOK_INSERT insertClause? selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?)
   |
     selectClause
      whereClause?
      groupByClause?
     orderByClause?
-     clusterByClause? -> ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause?)
+     clusterByClause?
+     limitClause? -> ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?)
   ;
 
 insertClause
@@ -367,6 +404,11 @@
    | KW_TABLE tabName -> ^(tabName)
    ;
 
+limitClause
+   :
+   KW_LIMIT num=Number -> ^(TOK_LIMIT $num)
+   ;
+
 //----------------------- Rules for parsing selectClause -----------------------------
 // select a,b,c ...
 selectClause
@@ -581,7 +623,7 @@
 
 precedenceUnaryOperator
     :
-    MINUS | TILDE
+    PLUS | MINUS | TILDE
    ;
 
 precedenceUnaryExpression
@@ -741,6 +783,7 @@
 KW_FULL : 'FULL';
 KW_ON : 'ON';
 KW_PARTITION : 'PARTITION';
+KW_PARTITIONS : 'PARTITIONS';
 KW_TABLE: 'TABLE';
 KW_TABLES: 'TABLES';
 KW_SHOW: 'SHOW';
@@ -798,6 +841,7 @@
 KW_OF: 'OF';
 KW_CAST: 'CAST';
 KW_ADD: 'ADD';
+KW_REPLACE: 'REPLACE';
 KW_COLUMNS: 'COLUMNS';
 KW_RLIKE: 'RLIKE';
 KW_REGEXP: 'REGEXP';
@@ -805,6 +849,10 @@
 KW_FUNCTION: 'FUNCTION';
 KW_EXPLAIN: 'EXPLAIN';
 KW_EXTENDED: 'EXTENDED';
+KW_SERIALIZER: 'SERIALIZER';
+KW_WITH: 'WITH';
+KW_PROPERTIES: 'SERDEPROPERTIES';
+KW_LIMIT: 'LIMIT';
 
 // Operators

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Tue Oct 21 11:29:18 2008
@@ -156,7 +156,7 @@
   }
 
   @Override
-  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+  public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException {
     isLocal = isOverWrite = false;
     Tree from_t = ast.getChild(0);
     Tree table_t = ast.getChild(1);
@@ -185,7 +185,7 @@
     }
 
     // initialize destination table/partition
-    tableSpec ts = new tableSpec(db, (CommonTree) table_t);
+    tableSpec ts = new tableSpec(db, (CommonTree) table_t, true);
 
     URI toURI = (ts.partHandle != null) ? ts.partHandle.getDataLocation() : ts.tableHandle.getDataLocation();
 
     // make sure the arguments make sense

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java Tue Oct 21 11:29:18 2008
@@ -17,14 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-/*
- * PartitionPruner.java
- *
- * Created on April 9, 2008, 3:48 PM
- *
- * To change this template, choose Tools | Template Manager
- * and open the template in the editor.
- */
 
 import java.util.*;
 
@@ -73,7 +65,7 @@
     this.tableAlias = tableAlias;
     this.metaData = metaData;
     this.tab = metaData.getTableForAlias(tableAlias);
-    this.prunerExpr = new exprNodeConstantDesc(Boolean.TRUE);
+    this.prunerExpr = null;
   }
 
   /**
@@ -106,8 +98,8 @@
       case HiveParser.TOK_COLREF: {
 
         assert(expr.getChildCount() == 2);
-        String tabAlias = SemanticAnalyzer.getTableName(expr);
-        String colName = SemanticAnalyzer.getSerDeFieldExpression(expr);
+        String tabAlias = expr.getChild(0).getText();
+        String colName = expr.getChild(1).getText();
         if (tabAlias == null || colName == null) {
           throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr));
         }
@@ -115,11 +107,17 @@
         if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) {
           desc = new exprNodeColumnDesc(String.class, colName);
         } else {
-          // might be a column from another table
           try {
-            TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(
-                this.metaData.getTableForAlias(tabAlias).getDeserializer().getObjectInspector());
-            desc = new exprNodeConstantDesc(typeInfo.getStructFieldTypeInfo(colName), null);
+            // might be a column from another table
+            Table t = this.metaData.getTableForAlias(tabAlias);
+            if (t.isPartitionKey(colName)) {
+              desc = new exprNodeConstantDesc(String.class, null);
+            }
+            else {
+              TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(
+                  this.metaData.getTableForAlias(tabAlias).getDeserializer().getObjectInspector());
+              desc = new exprNodeConstantDesc(typeInfo.getStructFieldTypeInfo(colName), null);
+            }
           } catch (SerDeException e){
             throw new RuntimeException(e);
           }
@@ -189,6 +187,37 @@
     return false;
   }
 
+  public boolean hasPartitionPredicate(CommonTree expr) {
+
+    int tokType = expr.getType();
+    boolean hasPPred = false;
+    switch (tokType) {
+      case HiveParser.TOK_COLREF: {
+
+        assert(expr.getChildCount() == 2);
+        String tabAlias = expr.getChild(0).getText();
+        String colName = expr.getChild(1).getText();
+        if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) {
+          hasPPred = true;
+        }
+        break;
+      }
+
+      default: {
+        boolean isFunction = (expr.getType() == HiveParser.TOK_FUNCTION);
+
+        // Create all children
+        int childrenBegin = (isFunction ? 1 : 0);
+        for (int ci=childrenBegin; ci<expr.getChildCount(); ci++) {
+          hasPPred = (hasPPred || hasPartitionPredicate((CommonTree)expr.getChild(ci)));
+        }
+        break;
+      }
+    }
+
+    return hasPPred;
+  }
+
   /** Add an expression */
   @SuppressWarnings("nls")
   public void addExpression(CommonTree expr) throws SemanticException {
@@ -197,7 +226,10 @@
     // Ignore null constant expressions
     if (!(desc instanceof exprNodeConstantDesc) || ((exprNodeConstantDesc)desc).getValue() != null ) {
       LOG.trace("adding pruning expr = " + desc);
-      this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("AND", this.prunerExpr, desc);
+      if (this.prunerExpr == null)
+        this.prunerExpr = desc;
+      else
+        this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("OR", this.prunerExpr, desc);
     }
   }
 
@@ -208,13 +240,15 @@
     LOG.trace("tabname = " + this.tab.getName());
     LOG.trace("prune Expression = " + this.prunerExpr);
 
-    HashSet<Partition> ret_parts = new HashSet<Partition>();
+    LinkedHashSet<Partition> ret_parts = new LinkedHashSet<Partition>();
     try {
       StructObjectInspector rowObjectInspector = (StructObjectInspector)this.tab.getDeserializer().getObjectInspector();
       Object[] rowWithPart = new Object[2];
       InspectableObject inspectableObject = new InspectableObject();
-
-      ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr);
+
+      ExprNodeEvaluator evaluator = null;
+      if (this.prunerExpr != null)
+        evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr);
       for(Partition part: Hive.get().getPartitions(this.tab)) {
         // Set all the variables here
         LinkedHashMap<String, String> partSpec = part.getSpec();
@@ -237,14 +271,18 @@
         StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois);
 
         // evaluate the expression tree
-        evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject);
-        LOG.trace("prune result for partition " + partSpec + ": " + inspectableObject.o);
-        if (!Boolean.FALSE.equals(inspectableObject.o)) {
-          LOG.debug("retained partition: " + partSpec);
-          ret_parts.add(part);
-        } else {
-          LOG.trace("pruned partition: " + partSpec);
+        if (evaluator != null) {
+          evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject);
+          LOG.trace("prune result for partition " + partSpec + ": " + inspectableObject.o);
+          if (!Boolean.FALSE.equals(inspectableObject.o)) {
+            LOG.debug("retained partition: " + partSpec);
+            ret_parts.add(part);
+          } else {
+            LOG.trace("pruned partition: " + partSpec);
+          }
         }
+        else
+          ret_parts.add(part);
       }
     } catch (Exception e) {
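The pruner now starts from a null expression instead of a constant TRUE, adopts the first candidate predicate as-is, ORs later ones on, and treats a still-null expression at prune time as "retain every partition". A generic rendition of that accumulation rule, with strings standing in for Hive's expression trees:

    import java.util.List;

    public class OrAccumulatorExample {
      // Generic rendition of the new accumulation in addExpression()/prune():
      // start from null, adopt the first predicate, OR the rest on; a null
      // result at the end means "no partition predicate, keep all partitions".
      public static String combine(List<String> predicates) {
        String acc = null;
        for (String p : predicates) {
          acc = (acc == null) ? p : "(" + acc + " OR " + p + ")";
        }
        return acc;
      }
    }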
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Tue Oct 21 11:29:18 2008
@@ -22,6 +22,7 @@
 
 import org.apache.hadoop.hive.ql.parse.QBParseInfo;
 import org.apache.hadoop.hive.ql.parse.QBMetaData;
+import org.apache.hadoop.hive.ql.metadata.Table;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -45,6 +46,7 @@
   private QBMetaData qbm;
   private QBJoinTree qbjoin;
   private String id;
+  private boolean isQuery;
 
   public void print(String msg)
   {
     LOG.info(msg + "alias=" + qbp.getAlias());
@@ -59,6 +61,9 @@
   public QB(String outer_id, String alias, boolean isSubQ) {
     aliasToTabs = new HashMap<String, String>();
     aliasToSubq = new HashMap<String, QBExpr>();
+    if (alias != null) {
+      alias = alias.toLowerCase();
+    }
     qbp = new QBParseInfo(alias, isSubQ);
     qbm = new QBMetaData();
     this.id = (outer_id == null ? alias : outer_id + ":" + alias);
@@ -85,6 +90,7 @@
   }
 
   public boolean exists(String alias) {
+    alias = alias.toLowerCase();
     if (aliasToTabs.get(alias) != null ||
         aliasToSubq.get(alias) != null)
       return true;
@@ -92,11 +98,11 @@
   }
 
   public void setTabAlias(String alias, String tabName) {
-    aliasToTabs.put(alias, tabName);
+    aliasToTabs.put(alias.toLowerCase(), tabName);
   }
 
   public void setSubqAlias(String alias, QBExpr qbexpr) {
-    aliasToSubq.put(alias, qbexpr);
+    aliasToSubq.put(alias.toLowerCase(), qbexpr);
   }
 
   public String getId() {
@@ -128,11 +134,11 @@
   }
 
   public QBExpr getSubqForAlias(String alias) {
-    return aliasToSubq.get(alias);
+    return aliasToSubq.get(alias.toLowerCase());
   }
 
   public String getTabNameForAlias(String alias) {
-    return aliasToTabs.get(alias);
+    return aliasToTabs.get(alias.toLowerCase());
   }
 
   public QBJoinTree getQbJoinTree() {
@@ -142,4 +148,24 @@
   public void setQbJoinTree(QBJoinTree qbjoin) {
     this.qbjoin = qbjoin;
   }
+
+  public void setIsQuery(boolean isQuery) {
+    this.isQuery = isQuery;
+  }
+
+  public boolean getIsQuery() {
+    return isQuery;
+  }
+
+  public boolean isSelectStarQuery() {
+    if (!qbp.isSelectStarQuery() || !aliasToSubq.isEmpty())
+      return false;
+
+    Iterator<Map.Entry<String, Table>> iter = qbm.getAliasToTable().entrySet().iterator();
+    Table tab = ((Map.Entry<String, Table>)iter.next()).getValue();
+    if (tab.isPartitioned())
+      return false;
+
+    return true;
+  }
 }
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Tue Oct 21 11:29:18 2008
@@ -64,8 +64,16 @@
     this.outTypes.add(cls);
   }
 
+  // All getXXX needs toLowerCase() because they are directly called from SemanticAnalyzer
+  // All setXXX does not need it because they are called from QB which already lowercases
+  // the aliases.
+
+  public HashMap<String, Table> getAliasToTable() {
+    return aliasToTable;
+  }
+
   public Table getTableForAlias(String alias) {
-    return this.aliasToTable.get(alias);
+    return this.aliasToTable.get(alias.toLowerCase());
   }
 
   public void setSrcForAlias(String alias, Table tab) {
@@ -89,23 +97,23 @@
   }
 
   public Integer getDestTypeForAlias(String alias) {
-    return this.nameToDestType.get(alias);
+    return this.nameToDestType.get(alias.toLowerCase());
   }
 
   public Table getDestTableForAlias(String alias) {
-    return this.nameToDestTable.get(alias);
+    return this.nameToDestTable.get(alias.toLowerCase());
   }
 
   public Partition getDestPartitionForAlias(String alias) {
-    return this.nameToDestPartition.get(alias);
+    return this.nameToDestPartition.get(alias.toLowerCase());
   }
 
   public String getDestFileForAlias(String alias) {
-    return this.nameToDestFile.get(alias);
+    return this.nameToDestFile.get(alias.toLowerCase());
   }
 
   public Table getSrcForAlias(String alias) {
-    return this.aliasToTable.get(alias);
+    return this.aliasToTable.get(alias.toLowerCase());
   }
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java Tue Oct 21 11:29:18 2008
@@ -32,6 +32,7 @@
 public class QBParseInfo {
 
   private boolean isSubQ;
+  private boolean canOptTopQ;
   private String alias;
   private CommonTree joinExpr;
   private HashMap<String, CommonTree> aliasToSrc;
@@ -41,6 +42,8 @@
   private HashMap<String, CommonTree> destToWhereExpr;
   private HashMap<String, CommonTree> destToGroupby;
   private HashMap<String, CommonTree> destToClusterby;
+  private HashMap<String, Integer> destToLimit;
+  private int outerQueryLimit;
 
   // used by GroupBy
   private HashMap<String, HashMap<String, CommonTree> > destToAggregationExprs;
@@ -57,12 +60,15 @@
     this.destToWhereExpr = new HashMap<String, CommonTree>();
     this.destToGroupby = new HashMap<String, CommonTree>();
     this.destToClusterby = new HashMap<String, CommonTree>();
+    this.destToLimit = new HashMap<String, Integer>();
 
     this.destToAggregationExprs = new HashMap<String, HashMap<String, CommonTree> >();
     this.destToDistinctFuncExpr = new HashMap<String, CommonTree>();
 
     this.alias = alias;
     this.isSubQ = isSubQ;
+    this.canOptTopQ = false;
+    this.outerQueryLimit = -1;
   }
 
   public void setAggregationExprsForClause(String clause, HashMap<String, CommonTree> aggregationTrees) {
@@ -102,7 +108,7 @@
   }
 
   public void setSrcForAlias(String alias, CommonTree ast) {
-    this.aliasToSrc.put(alias, ast);
+    this.aliasToSrc.put(alias.toLowerCase(), ast);
   }
 
   public Set<String> getClauseNames() {
@@ -134,7 +140,7 @@
   }
 
   public CommonTree getSrcForAlias(String alias) {
-    return this.aliasToSrc.get(alias);
+    return this.aliasToSrc.get(alias.toLowerCase());
   }
 
   public String getAlias() {
@@ -145,6 +151,14 @@
     return this.isSubQ;
   }
 
+  public boolean getCanOptTopQ() {
+    return this.canOptTopQ;
+  }
+
+  public void setCanOptTopQ(boolean canOptTopQ) {
+    this.canOptTopQ = canOptTopQ;
+  }
+
   public CommonTree getJoinExpr() {
     return this.joinExpr;
   }
@@ -152,12 +166,87 @@
   public void setJoinExpr(CommonTree joinExpr) {
     this.joinExpr = joinExpr;
   }
-
+
   public TableSample getTabSample(String alias) {
-    return this.nameToSample.get(alias);
+    return this.nameToSample.get(alias.toLowerCase());
   }
 
   public void setTabSample(String alias, TableSample tableSample) {
-    this.nameToSample.put(alias, tableSample);
+    this.nameToSample.put(alias.toLowerCase(), tableSample);
   }
+
+  public void setDestLimit(String dest, Integer limit) {
+    this.destToLimit.put(dest, limit);
+  }
+
+  public Integer getDestLimit(String dest) {
+    return this.destToLimit.get(dest);
+  }
+
+  /**
+   * @return the outerQueryLimit
+   */
+  public int getOuterQueryLimit() {
+    return outerQueryLimit;
+  }
+
+  /**
+   * @param outerQueryLimit the outerQueryLimit to set
+   */
+  public void setOuterQueryLimit(int outerQueryLimit) {
+    this.outerQueryLimit = outerQueryLimit;
+  }
+
+  public boolean isSelectStarQuery() {
+    if (isSubQ ||
+       (joinExpr != null) ||
+       (!nameToSample.isEmpty()) ||
+       (!destToWhereExpr.isEmpty()) ||
+       (!destToGroupby.isEmpty()) ||
+       (!destToClusterby.isEmpty()))
+      return false;
+
+    Iterator<Map.Entry<String, HashMap<String, CommonTree>>> aggrIter = destToAggregationExprs.entrySet().iterator();
+    while (aggrIter.hasNext()) {
+      HashMap<String, CommonTree> h = aggrIter.next().getValue();
+      if ((h != null) && (!h.isEmpty()))
+        return false;
+    }
+
+    if (!destToDistinctFuncExpr.isEmpty()) {
+      Iterator<Map.Entry<String, CommonTree>> distn = destToDistinctFuncExpr.entrySet().iterator();
+      while (distn.hasNext()) {
+        CommonTree ct = distn.next().getValue();
+        if (ct != null)
+          return false;
+      }
+    }
+
+    Iterator<Map.Entry<String, CommonTree>> iter = nameToDest.entrySet().iterator();
+    while (iter.hasNext()) {
+      Map.Entry<String, CommonTree> entry = iter.next();
+      CommonTree v = entry.getValue();
+      if (!(((CommonTree)v.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE))
+        return false;
+    }
+
+    iter = destToSelExpr.entrySet().iterator();
+    while (iter.hasNext()) {
+      Map.Entry<String, CommonTree> entry = iter.next();
+      CommonTree selExprList = entry.getValue();
+      // Iterate over the selects
+      for (int i = 0; i < selExprList.getChildCount(); ++i) {
+
+        // list of the columns
+        CommonTree selExpr = (CommonTree) selExprList.getChild(i);
+        CommonTree sel = (CommonTree)selExpr.getChild(0);
+
+        if (sel.getToken().getType() != HiveParser.TOK_ALLCOLREF)
+          return false;
+      }
+    }
+
+    return true;
+  }
+
 }
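The isSelectStarQuery() checks added to QBParseInfo and QB appear intended to feed the new fetchTask path in BaseSemanticAnalyzer: a top-level, unjoined, unsampled, unfiltered, ungrouped, unclustered SELECT * into a temporary file needs no MapReduce job. A condensed, hypothetical version of the predicate, with all flags assumed precomputed by the caller:

    public class SelectStarCheckExample {
      // Condensed form of the checks in QBParseInfo.isSelectStarQuery() and
      // QB.isSelectStarQuery(); the flag names are illustrative.
      public static boolean isSimpleSelectStar(boolean isSubQuery, boolean hasJoin,
          boolean hasSample, boolean hasWhere, boolean hasGroupBy,
          boolean hasClusterBy, boolean allSelectsAreStar,
          boolean destIsTmpFile, boolean sourceIsPartitioned) {
        return !isSubQuery && !hasJoin && !hasSample && !hasWhere && !hasGroupBy
            && !hasClusterBy && allSelectsAreStar && destIsTmpFile
            && !sourceIsPartitioned;
      }
    }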
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Oct 21 11:29:18 2008
@@ -50,6 +50,9 @@
   }
 
   public void put(String tab_alias, String col_alias, ColumnInfo colInfo) {
+    if (tab_alias != null) {
+      tab_alias = tab_alias.toLowerCase();
+    }
     col_alias = col_alias.toLowerCase();
     if (rowSchema.getSignature() == null) {
       rowSchema.setSignature(new Vector<ColumnInfo>());
@@ -71,10 +74,11 @@
   }
 
   public boolean hasTableAlias(String tab_alias) {
-    return rslvMap.get(tab_alias) != null ? true : false;
+    return rslvMap.get(tab_alias.toLowerCase()) != null;
   }
 
   public ColumnInfo get(String tab_alias, String col_alias) {
+    tab_alias = tab_alias.toLowerCase();
     col_alias = col_alias.toLowerCase();
 
     HashMap<String, ColumnInfo> f_map = rslvMap.get(tab_alias);
     if (f_map == null) {
@@ -88,7 +92,7 @@
   }
 
   public HashMap<String, ColumnInfo> getFieldMap(String tab_alias) {
-    return rslvMap.get(tab_alias);
+    return rslvMap.get(tab_alias.toLowerCase());
   }
 
   public int getPosition(String internalName) {
