Author: hashutosh
Date: Fri Feb 7 16:02:41 2014
New Revision: 1565702
URL: http://svn.apache.org/r1565702
Log:
HIVE-6342 : hive drop partitions should use standard expr filter instead of
some custom class (Sergey Shelukhin via Ashutosh Chauhan)
Removed:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionSpec.java
hive/trunk/ql/src/test/queries/clientnegative/drop_partition_filter_failure2.q
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
hive/trunk/ql/src/test/results/clientnegative/drop_partition_failure.q.out
hive/trunk/ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
(original)
+++
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
Fri Feb 7 16:02:41 2014
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.ql.plan.De
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.PartitionSpec;
import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
@@ -314,12 +313,12 @@ public class HCatSemanticAnalyzer extend
// table is partitioned.
} else {
//this is actually an ALTER TABLE DROP PARTITION statement
- for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
+ for (DropTableDesc.PartSpec partSpec : dropTable.getPartSpecs()) {
// partitions are not added as write entries in drop partitions in
Hive
Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
dropTable.getTableName());
List<Partition> partitions = null;
try {
- partitions = hive.getPartitionsByFilter(table,
partSpec.toString());
+ partitions = hive.getPartitionsByFilter(table,
partSpec.getPartSpec().getExprString());
} catch (Exception e) {
throw new HiveException(e);
}
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
(original)
+++
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
Fri Feb 7 16:02:41 2014
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.ql.plan.De
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.PartitionSpec;
import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
@@ -315,12 +314,12 @@ public class HCatSemanticAnalyzer extend
// table is partitioned.
} else {
//this is actually an ALTER TABLE DROP PARTITION statement
- for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
+ for (DropTableDesc.PartSpec partSpec : dropTable.getPartSpecs()) {
// partitions are not added as write entries in drop partitions in
Hive
Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
dropTable.getTableName());
List<Partition> partitions = null;
try {
- partitions = hive.getPartitionsByFilter(table,
partSpec.toString());
+ partitions = hive.getPartitionsByFilter(table,
partSpec.getPartSpec().getExprString());
} catch (Exception e) {
throw new HiveException(e);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Feb
7 16:02:41 2014
@@ -131,7 +131,6 @@ import org.apache.hadoop.hive.ql.plan.Gr
import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.LockTableDesc;
import org.apache.hadoop.hive.ql.plan.MsckDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionSpec;
import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
@@ -177,6 +176,7 @@ import org.apache.hadoop.hive.shims.Shim
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.thrift.TException;
import org.stringtemplate.v4.ST;
/**
@@ -3570,26 +3570,24 @@ public class DDLTask extends Task<DDLWor
} else {
// This is actually an ALTER TABLE DROP PARTITION
List<Partition> partsToDelete = new ArrayList<Partition>();
- for (PartitionSpec partSpec : dropTbl.getPartSpecs()) {
- List<Partition> partitions = null;
- // getPartitionsByFilter only works for string columns.
- // Till that is fixed, only equality will work for non-string columns.
- if (dropTbl.isStringPartitionColumns()) {
- try {
- partitions = db.getPartitionsByFilter(tbl, partSpec.toString());
- } catch (Exception e) {
- throw new HiveException(e);
- }
+ for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()) {
+ List<Partition> partitions = new ArrayList<Partition>();
+ boolean hasUnknown;
+ try {
+ hasUnknown = db.getPartitionsByExpr(tbl, partSpec.getPartSpec(),
conf, partitions);
+ } catch (TException e) {
+ throw new HiveException(e);
}
- else {
- partitions = db.getPartitions(tbl,
partSpec.getPartSpecWithoutOperator());
+ if (hasUnknown) {
+ throw new HiveException("Unexpected unknown partititions from "
+ + partSpec.getPartSpec().getExprString());
}
// this is to prevent dropping archived partition which is archived in
a
// different level the drop command specified.
int partPrefixToDrop = 0;
for (FieldSchema fs : tbl.getPartCols()) {
- if (partSpec.existsKey(fs.getName())) {
+ if (partSpec.getPartSpecKeys().contains(fs.getName())) {
partPrefixToDrop += 1;
} else {
break;
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
---
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
(original)
+++
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
Fri Feb 7 16:02:41 2014
@@ -56,6 +56,7 @@ import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -92,6 +93,10 @@ import org.apache.hadoop.hive.ql.plan.De
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
@@ -99,7 +104,6 @@ import org.apache.hadoop.hive.ql.plan.Lo
import org.apache.hadoop.hive.ql.plan.LockTableDesc;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.MsckDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionSpec;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
@@ -125,10 +129,13 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import com.google.common.collect.Lists;
+
/**
* DDLSemanticAnalyzer.
*
@@ -700,8 +707,7 @@ public class DDLSemanticAnalyzer extends
outputs.add(new WriteEntity(tab));
}
- DropTableDesc dropTblDesc = new DropTableDesc(
- tableName, expectView, ifExists, true);
+ DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView,
ifExists);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
dropTblDesc), conf));
}
@@ -2390,32 +2396,13 @@ public class DDLSemanticAnalyzer extends
String tblName = getUnescapedName((ASTNode) ast.getChild(0));
// get table metadata
- List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
Table tab = getTable(tblName, true);
+ List<ExprNodeGenericFuncDesc> partSpecs = new
ArrayList<ExprNodeGenericFuncDesc>();
+ List<List<String>> names = new ArrayList<List<String>>();
+ getFullPartitionSpecs(ast, tab, partSpecs, names);
validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
inputs.add(new ReadEntity(tab));
- // Find out if all partition columns are strings. This is needed for JDO
- boolean stringPartitionColumns = true;
- List<FieldSchema> partCols = tab.getPartCols();
-
- for (FieldSchema partCol : partCols) {
- if (!partCol.getType().toLowerCase().equals("string")) {
- stringPartitionColumns = false;
- break;
- }
- }
-
- // Only equality is supported for non-string partition columns
- if (!stringPartitionColumns) {
- for (PartitionSpec partSpec : partSpecs) {
- if (partSpec.isNonEqualityOperator()) {
- throw new SemanticException(
-
ErrorMsg.DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY.getMsg());
- }
- }
- }
-
boolean ignoreProtection =
(ast.getFirstChildWithType(HiveParser.TOK_IGNOREPROTECTION) != null);
if (partSpecs != null) {
boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)
!= null);
@@ -2423,11 +2410,10 @@ public class DDLSemanticAnalyzer extends
// configured not to fail silently
boolean throwException =
!ifExists && !HiveConf.getBoolVar(conf,
ConfVars.DROPIGNORESNONEXISTENT);
- addTableDropPartsOutputs(tblName, partSpecs, throwException,
- stringPartitionColumns, ignoreProtection);
+ addTableDropPartsOutputs(tblName, partSpecs, throwException,
ignoreProtection);
}
DropTableDesc dropTblDesc =
- new DropTableDesc(tblName, partSpecs, expectView,
stringPartitionColumns, ignoreProtection);
+ new DropTableDesc(tblName, partSpecs, names, expectView,
ignoreProtection);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
dropTblDesc), conf));
@@ -2741,32 +2727,48 @@ public class DDLSemanticAnalyzer extends
*
* @param ast
* Tree to extract partitions from.
- * @return A list of PartitionSpec objects which contain the mapping from
- * key to operator and value.
* @throws SemanticException
*/
- private List<PartitionSpec> getFullPartitionSpecs(CommonTree ast)
- throws SemanticException {
- List<PartitionSpec> partSpecList = new ArrayList<PartitionSpec>();
+ private void getFullPartitionSpecs(CommonTree ast, Table tab,
+ List<ExprNodeGenericFuncDesc> exprs, List<List<String>> cols) throws
SemanticException {
+ Map<String, String> colTypes = new HashMap<String, String>();
+ for (FieldSchema fs : tab.getPartitionKeys()) {
+ colTypes.put(fs.getName().toLowerCase(), fs.getType());
+ }
for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
Tree partSpecTree = ast.getChild(childIndex);
- if (partSpecTree.getType() == HiveParser.TOK_PARTSPEC) {
- PartitionSpec partSpec = new PartitionSpec();
-
- for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
- CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
- assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
- String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
- String operator = partSpecSingleKey.getChild(1).getText();
- String val = partSpecSingleKey.getChild(2).getText();
- partSpec.addPredicate(key, operator, val);
- }
-
- partSpecList.add(partSpec);
+ if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
+ ExprNodeGenericFuncDesc expr = null;
+ List<String> names = new ArrayList<String>(partSpecTree.getChildCount());
+ for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
+ CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
+ assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
+ String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
+ String operator = partSpecSingleKey.getChild(1).getText();
+ String val = stripQuotes(partSpecSingleKey.getChild(2).getText());
+
+ String type = colTypes.get(key);
+ if (type == null) {
+ throw new SemanticException("Column " + key + " not found");
+ }
+ ExprNodeColumnDesc column = new ExprNodeColumnDesc(
+ TypeInfoFactory.getPrimitiveTypeInfo(type), key, null, true);
+ ExprNodeGenericFuncDesc op = new ExprNodeGenericFuncDesc(
+ TypeInfoFactory.booleanTypeInfo,
+ FunctionRegistry.getFunctionInfo(operator).getGenericUDF(),
+ Lists.newArrayList(column, new ExprNodeConstantDesc(val)));
+ expr = (expr == null) ? op : new ExprNodeGenericFuncDesc(
+ TypeInfoFactory.booleanTypeInfo,
+ FunctionRegistry.getGenericUDFForAnd(),
+ Lists.<ExprNodeDesc>newArrayList(expr, op));
+ names.add(key);
+ }
+ if (expr != null) {
+ exprs.add(expr);
+ cols.add(names);
}
}
- return partSpecList;
}
/**
@@ -2857,34 +2859,31 @@ public class DDLSemanticAnalyzer extends
* pre-execution hook. If the partition does not exist, throw an error if
* throwIfNonExistent is true, otherwise ignore it.
*/
- private void addTableDropPartsOutputs(String tblName, List<PartitionSpec>
partSpecs,
- boolean throwIfNonExistent, boolean stringPartitionColumns, boolean
ignoreProtection)
+ private void addTableDropPartsOutputs(String tblName,
List<ExprNodeGenericFuncDesc> partSpecs,
+ boolean throwIfNonExistent, boolean ignoreProtection)
throws SemanticException {
Table tab = getTable(tblName);
- Iterator<PartitionSpec> i;
- int index;
- for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
- PartitionSpec partSpec = i.next();
- List<Partition> parts = null;
- if (stringPartitionColumns) {
- try {
- parts = db.getPartitionsByFilter(tab, partSpec.toString());
- } catch (Exception e) {
- throw new
SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
- }
+ Iterator<ExprNodeGenericFuncDesc> i = partSpecs.iterator();
+ while (i.hasNext()) {
+ ExprNodeGenericFuncDesc partSpec = i.next();
+ List<Partition> parts = new ArrayList<Partition>();
+ boolean hasUnknown = false;
+ try {
+ hasUnknown = db.getPartitionsByExpr(tab, partSpec, conf, parts);
+ } catch (Exception e) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()), e);
}
- else {
- try {
- parts = db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
- } catch (Exception e) {
- throw new
SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
- }
+ if (hasUnknown) {
+ throw new SemanticException(
+ "Unexpected unknown partitions for " + partSpec.getExprString());
}
if (parts.isEmpty()) {
if (throwIfNonExistent) {
- throw new
SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()));
}
}
for (Partition p : parts) {
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
Fri Feb 7 16:02:41 2014
@@ -30,14 +30,28 @@ import java.util.List;
public class DropTableDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
+ public static class PartSpec {
+ public PartSpec(ExprNodeGenericFuncDesc partSpec, ArrayList<String>
partSpecKeys) {
+ this.partSpec = partSpec;
+ this.partSpecKeys = partSpecKeys;
+ }
+ public ExprNodeGenericFuncDesc getPartSpec() {
+ return partSpec;
+ }
+ public ArrayList<String> getPartSpecKeys() {
+ return partSpecKeys;
+ }
+ private static final long serialVersionUID = 1L;
+ private ExprNodeGenericFuncDesc partSpec;
+ // TODO: see if we can get rid of this... used in one place to distinguish
archived parts
+ private ArrayList<String> partSpecKeys;
+ }
+
String tableName;
- ArrayList<PartitionSpec> partSpecs;
+ ArrayList<PartSpec> partSpecs;
boolean expectView;
boolean ifExists;
boolean ignoreProtection;
- boolean stringPartitionColumns; // This is due to JDO not working very well
with
- // non-string partition columns.
- // We need a different codepath for them
public DropTableDesc() {
}
@@ -45,28 +59,25 @@ public class DropTableDesc extends DDLDe
/**
* @param tableName
*/
- public DropTableDesc(String tableName, boolean expectView,
- boolean ifExists, boolean stringPartitionColumns) {
+ public DropTableDesc(String tableName, boolean expectView, boolean ifExists)
{
this.tableName = tableName;
- partSpecs = null;
+ this.partSpecs = null;
this.expectView = expectView;
this.ifExists = ifExists;
this.ignoreProtection = false;
- this.stringPartitionColumns = stringPartitionColumns;
}
- public DropTableDesc(String tableName, List<PartitionSpec> partSpecs,
- boolean expectView, boolean stringPartitionColumns,
- boolean ignoreProtection) {
-
+ public DropTableDesc(String tableName, List<ExprNodeGenericFuncDesc>
partSpecs,
+ List<List<String>> partSpecKeys, boolean expectView, boolean
ignoreProtection) {
this.tableName = tableName;
- this.partSpecs = new ArrayList<PartitionSpec>(partSpecs.size());
- for (int i = 0; i < partSpecs.size(); i++) {
- this.partSpecs.add(partSpecs.get(i));
+ assert partSpecs.size() == partSpecKeys.size();
+ this.partSpecs = new ArrayList<PartSpec>(partSpecs.size());
+ for (int i = 0; i < partSpecs.size(); ++i) {
+ this.partSpecs.add(new PartSpec(
+ partSpecs.get(i), new ArrayList<String>(partSpecKeys.get(i))));
}
this.ignoreProtection = ignoreProtection;
this.expectView = expectView;
- this.stringPartitionColumns = stringPartitionColumns;
}
/**
@@ -88,19 +99,11 @@ public class DropTableDesc extends DDLDe
/**
* @return the partSpecs
*/
- public ArrayList<PartitionSpec> getPartSpecs() {
+ public ArrayList<PartSpec> getPartSpecs() {
return partSpecs;
}
/**
- * @param partSpecs
- * the partSpecs to set
- */
- public void setPartSpecs(ArrayList<PartitionSpec> partSpecs) {
- this.partSpecs = partSpecs;
- }
-
- /**
* @return whether or not protection will be ignored for the partition
*/
public boolean getIgnoreProtection() {
@@ -144,12 +147,4 @@ public class DropTableDesc extends DDLDe
public void setIfExists(boolean ifExists) {
this.ifExists = ifExists;
}
-
- public boolean isStringPartitionColumns() {
- return stringPartitionColumns;
- }
-
- public void setStringPartitionColumns(boolean stringPartitionColumns) {
- this.stringPartitionColumns = stringPartitionColumns;
- }
}
Modified:
hive/trunk/ql/src/test/results/clientnegative/drop_partition_failure.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/drop_partition_failure.q.out?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/drop_partition_failure.q.out
(original)
+++ hive/trunk/ql/src/test/results/clientnegative/drop_partition_failure.q.out
Fri Feb 7 16:02:41 2014
@@ -31,4 +31,4 @@ POSTHOOK: type: SHOWPARTITIONS
b=1/c=1
b=1/c=2
b=2/c=2
-FAILED: SemanticException [Error 10006]: Partition not found b = '3'
+FAILED: SemanticException [Error 10006]: Partition not found (b = '3')
Modified:
hive/trunk/ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out?rev=1565702&r1=1565701&r2=1565702&view=diff
==============================================================================
---
hive/trunk/ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out
(original)
+++
hive/trunk/ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out
Fri Feb 7 16:02:41 2014
@@ -15,4 +15,4 @@ PREHOOK: type: SHOWPARTITIONS
POSTHOOK: query: show partitions ptestfilter1
POSTHOOK: type: SHOWPARTITIONS
c=US/d=1
-FAILED: SemanticException [Error 10006]: Partition not found c = 'US' AND d < 1
+FAILED: SemanticException [Error 10006]: Partition not found ((c = 'US') and
(d < '1'))