Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java Mon May 27 05:27:15 2013
@@ -48,7 +48,7 @@ public class AvgPartitionSizeBasedBigTab
   public int getBigTablePosition(ParseContext parseCtx, JoinOperator joinOp)
     throws SemanticException {
     int bigTablePos = 0;
-    long maxSize = 0;
+    long maxSize = -1;
     int numPartitionsCurrentBigTable = 0; // number of partitions for the chosen big table
     HiveConf conf = parseCtx.getConf();
@@ -79,7 +79,7 @@ public class AvgPartitionSizeBasedBigTab
         for (Partition part : partsList.getNotDeniedPartns()) {
           totalSize += getSize(conf, part);
         }
-        averageSize = totalSize/numPartitions;
+        averageSize = numPartitions == 0 ? 0 : totalSize/numPartitions;
       }

       if (averageSize > maxSize) {
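
The change above starts the running maximum at -1 instead of 0, so a join made up entirely of empty tables still yields a valid big-table position, and the average-size computation no longer divides by zero when a table has no non-denied partitions. A minimal standalone sketch of that selection logic (class and method names are illustrative, not Hive's actual API):

import java.util.Arrays;
import java.util.List;

public class BigTableSelectionSketch {
  // Picks the index of the largest table; sizes of 0 (e.g. empty tables)
  // remain valid candidates because the running maximum starts at -1.
  static int pickBigTablePosition(List<Long> avgPartitionSizes) {
    int bigTablePos = 0;
    long maxSize = -1; // was 0 before the fix; an all-empty input then never matched
    for (int pos = 0; pos < avgPartitionSizes.size(); pos++) {
      if (avgPartitionSizes.get(pos) > maxSize) {
        maxSize = avgPartitionSizes.get(pos);
        bigTablePos = pos;
      }
    }
    return bigTablePos;
  }

  // Average that tolerates a table with no (non-denied) partitions.
  static long safeAverage(long totalSize, int numPartitions) {
    return numPartitions == 0 ? 0 : totalSize / numPartitions;
  }

  public static void main(String[] args) {
    System.out.println(pickBigTablePosition(Arrays.asList(0L, 0L))); // 0, a defined answer
    System.out.println(safeAverage(0L, 0));                          // 0, no ArithmeticException
  }
}
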
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java Mon May 27 05:27:15 2013
@@ -41,7 +41,7 @@ implements BigTableSelectorForAutoSMJ {
   public int getBigTablePosition(ParseContext parseCtx, JoinOperator joinOp)
     throws SemanticException {
     int bigTablePos = 0;
-    long maxSize = 0;
+    long maxSize = -1;
     HiveConf conf = parseCtx.getConf();

     try {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java Mon May 27 05:27:15 2013
@@ -466,7 +466,7 @@ public class CommonJoinTaskDispatcher ex
         HiveConf.ConfVars.HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD);

     boolean bigTableFound = false;
-    long largestBigTableCandidateSize = 0;
+    long largestBigTableCandidateSize = -1;
     long sumTableSizes = 0;
    for (String alias : aliasToWork.keySet()) {
       int tablePosition = getPosition(currWork, joinOp, alias);
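
The same -1 initialization carries over to the map-join conversion path in CommonJoinTaskDispatcher, where the chosen big table is the one streamed while the rest must fit under the noconditionaltask size threshold. A rough standalone model of that decision, heavily simplified relative to Hive's actual code and using illustrative names throughout:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapJoinCandidateSketch {
  // Chooses which alias to treat as the big (streamed) table while checking
  // that the remaining small tables fit under the map-join size threshold.
  static String chooseBigTable(Map<String, Long> aliasToSize, long threshold) {
    String bigTable = null;
    long largestCandidate = -1; // -1 so even a 0-byte table can be chosen
    long sumTableSizes = 0;
    for (Map.Entry<String, Long> e : aliasToSize.entrySet()) {
      sumTableSizes += e.getValue();
      if (e.getValue() > largestCandidate) {
        largestCandidate = e.getValue();
        bigTable = e.getKey();
      }
    }
    // everything except the big table must fit in memory
    return (sumTableSizes - largestCandidate) <= threshold ? bigTable : null;
  }

  public static void main(String[] args) {
    Map<String, Long> sizes = new LinkedHashMap<>();
    sizes.put("a", 0L);
    sizes.put("b", 0L);
    System.out.println(chooseBigTable(sizes, 10_000_000L)); // "a": empty tables still convert
  }
}
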
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Mon May 27 05:27:15 2013
@@ -64,7 +64,6 @@ import org.apache.hadoop.hive.ql.plan.Li
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -121,7 +120,6 @@ public abstract class BaseSemanticAnalyz
       .getName();
   protected static final String ORCFILE_SERDE = OrcSerde.class
       .getName();
-  protected static final String COLUMNAR_SERDE = ColumnarSerDe.class.getName();

   class RowFormatParams {
     String fieldDelim = null;
@@ -195,7 +193,7 @@ public abstract class BaseSemanticAnalyz
         inputFormat = RCFILE_INPUT;
         outputFormat = RCFILE_OUTPUT;
         if (shared.serde == null) {
-          shared.serde = COLUMNAR_SERDE;
+          shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
         }
         storageFormat = true;
         break;
@@ -231,7 +229,7 @@ public abstract class BaseSemanticAnalyz
     } else if ("RCFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
       inputFormat = RCFILE_INPUT;
       outputFormat = RCFILE_OUTPUT;
-      shared.serde = COLUMNAR_SERDE;
+      shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
     } else if ("ORC".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
       inputFormat = ORCFILE_INPUT;
       outputFormat = ORCFILE_OUTPUT;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon May 27 05:27:15 2013
@@ -1350,7 +1350,7 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_TBLRCFILE:
       inputFormat = RCFILE_INPUT;
       outputFormat = RCFILE_OUTPUT;
-      serde = COLUMNAR_SERDE;
+      serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
       break;
     case HiveParser.TOK_TBLORCFILE:
       inputFormat = ORCFILE_INPUT;
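
Both analyzers now resolve the RCFile serde from HIVEDEFAULTRCFILESERDE instead of the removed COLUMNAR_SERDE constant, and the new test below sets hive.default.rcfile.serde to LazyBinaryColumnarSerDe to exercise it. A toy model of the lookup, with a plain map standing in for HiveConf (the property name comes from the test; the fallback mirrors the old hard-coded default):

import java.util.HashMap;
import java.util.Map;

public class DefaultRcfileSerdeSketch {
  // Resolves the serde used for STORED AS RCFILE: configured value if set,
  // otherwise the historical ColumnarSerDe default.
  static String resolveRcfileSerde(Map<String, String> conf) {
    return conf.getOrDefault("hive.default.rcfile.serde",
        "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe");
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<>();
    System.out.println(resolveRcfileSerde(conf)); // ColumnarSerDe (fallback)
    conf.put("hive.default.rcfile.serde",
        "org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe");
    System.out.println(resolveRcfileSerde(conf)); // LazyBinaryColumnarSerDe
  }
}
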
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Mon May 27 05:27:15 2013
@@ -22,16 +22,18 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.lang.management.ManagementFactory;
 import java.net.URI;
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

+import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -57,6 +59,7 @@ import org.apache.hadoop.hive.ql.util.Do
  * configuration information
  */
 public class SessionState {
+  private static final Log LOG = LogFactory.getLog(SessionState.class);

   /**
    * current configuration.
@@ -179,18 +182,21 @@ public class SessionState {
     this.isVerbose = isVerbose;
   }

-  public SessionState() {
-    this(null);
-  }
-
   public SessionState(HiveConf conf) {
     this.conf = conf;
     isSilent = conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
     ls = new LineageState();
     overriddenConfigurations = new HashMap<String, String>();
     overriddenConfigurations.putAll(HiveConf.getConfSystemProperties());
+    // if there isn't already a session name, go ahead and create it.
+    if (StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HIVESESSIONID))) {
+      conf.setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
+    }
   }

+  private static final SimpleDateFormat DATE_FORMAT =
+      new SimpleDateFormat("yyyyMMddHHmm");
+
   public void setCmd(String cmdString) {
     conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, cmdString);
   }
@@ -242,12 +248,6 @@ public class SessionState {

     tss.set(startSs);

-    if (StringUtils.isEmpty(startSs.getConf().getVar(
-        HiveConf.ConfVars.HIVESESSIONID))) {
-      startSs.getConf()
-          .setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
-    }
-
     if (startSs.hiveHist == null) {
       startSs.hiveHist = new HiveHistory(startSs);
     }
@@ -297,15 +297,15 @@ public class SessionState {
     return hiveHist;
   }

+  /**
+   * Create a session ID. Looks like:
+   *   $user_$pid@$host_$date
+   * @return the unique string
+   */
   private static String makeSessionId() {
-    GregorianCalendar gc = new GregorianCalendar();
     String userid = System.getProperty("user.name");
-
-    return userid
-        + "_"
-        + String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
-        gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
-        .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
+    return userid + "_" + ManagementFactory.getRuntimeMXBean().getName() + "_"
+        + DATE_FORMAT.format(new Date());
   }

   /**
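
The old Calendar-based ID had only minute resolution, so two sessions started by the same user in the same minute collided; folding in RuntimeMXBean.getName(), which conventionally returns "pid@hostname" on HotSpot JVMs, makes the ID unique per process. A standalone mirror of the new format (class name is illustrative):

import java.lang.management.ManagementFactory;
import java.text.SimpleDateFormat;
import java.util.Date;

public class SessionIdSketch {
  private static final SimpleDateFormat DATE_FORMAT =
      new SimpleDateFormat("yyyyMMddHHmm");

  // Mirrors the new ID shape: $user_$pid@$host_$date.
  static String makeSessionId() {
    String userid = System.getProperty("user.name");
    return userid + "_" + ManagementFactory.getRuntimeMXBean().getName()
        + "_" + DATE_FORMAT.format(new Date());
  }

  public static void main(String[] args) {
    System.out.println(makeSessionId()); // e.g. hive_12345@host_201305270527
  }
}
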
@@ -588,35 +588,15 @@ public class SessionState {
   private String downloadResource(String value, boolean convertToUnix) {
     if (canDownloadResource(value)) {
       getConsole().printInfo("converting to local " + value);
-      String location = getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR);
-
+      File resourceDir = new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
       String destinationName = new Path(value).getName();
-      String prefix = destinationName;
-      String postfix = null;
-      int index = destinationName.lastIndexOf(".");
-      if (index > 0) {
-        prefix = destinationName.substring(0, index);
-        postfix = destinationName.substring(index);
-      }
-      if (prefix.length() < 3) {
-        prefix += ".tmp"; // prefix should be longer than 3
-      }
-
-      File resourceDir = new File(location);
-      if (resourceDir.exists() && !resourceDir.isDirectory()) {
-        throw new RuntimeException("The resource directory is not a directory, " +
-            "resourceDir is set to " + resourceDir);
+      File destinationFile = new File(resourceDir, destinationName);
+      if (resourceDir.exists() && !resourceDir.isDirectory()) {
+        throw new RuntimeException("The resource directory is not a directory, resourceDir is set to" + resourceDir);
       }
       if (!resourceDir.exists() && !resourceDir.mkdirs()) {
         throw new RuntimeException("Couldn't create directory " + resourceDir);
       }
-
-      File destinationFile;
-      try {
-        destinationFile = File.createTempFile(prefix, postfix, resourceDir);
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to create temporary file for " + value, e);
-      }
       try {
         FileSystem fs = FileSystem.get(new URI(value), conf);
         fs.copyToLocalFile(new Path(value), new Path(destinationFile.getCanonicalPath()));
@@ -756,4 +736,17 @@ public class SessionState {
   public void setLocalMapRedErrors(Map<String, List<String>> localMapRedErrors) {
     this.localMapRedErrors = localMapRedErrors;
   }
+
+  public void close() throws IOException {
+    File resourceDir =
+        new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
+    LOG.debug("Removing resource dir " + resourceDir);
+    try {
+      if (resourceDir.exists()) {
+        FileUtils.deleteDirectory(resourceDir);
+      }
+    } catch (IOException e) {
+      LOG.info("Error removing session resource dir " + resourceDir, e);
+    }
+  }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Mon May 27 05:27:15 2013
@@ -50,6 +50,7 @@ public abstract class GenericUDF impleme
    * GenericUDF use DeferedObject to pass arguments.
    */
  public static interface DeferredObject {
+    void prepare(int version) throws HiveException;
     Object get() throws HiveException;
   };
@@ -65,6 +66,10 @@ public abstract class GenericUDF impleme
     }

     @Override
+    public void prepare(int version) throws HiveException {
+    }
+
+    @Override
     public Object get() throws HiveException {
       return value;
     }
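
The GenericUDF change above extends DeferredObject with a prepare(int) hook, with the existing DeferredJavaObject picking up a no-op implementation since a constant value needs no per-row setup. A compilable mirror of the shape of that API, using plain Exception in place of HiveException so it runs without Hive on the classpath:

public class DeferredObjectSketch {
  // Simplified mirror of the extended interface: prepare() lets an
  // implementation do setup for a given evaluation version before get().
  interface DeferredObject {
    void prepare(int version) throws Exception;
    Object get() throws Exception;
  }

  // Constant values need no preparation, matching the no-op default the
  // patch adds to DeferredJavaObject.
  static class DeferredJavaObject implements DeferredObject {
    private final Object value;
    DeferredJavaObject(Object value) { this.value = value; }
    @Override public void prepare(int version) { /* nothing to do */ }
    @Override public Object get() { return value; }
  }

  public static void main(String[] args) throws Exception {
    DeferredObject d = new DeferredJavaObject("hello");
    d.prepare(0);
    System.out.println(d.get()); // hello
  }
}
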
Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Mon May 27 05:27:15 2013
@@ -1017,7 +1017,8 @@ public class QTestUtil {
       ".*LOCK_TIME:.*",
       ".*grantTime.*",
       ".*[.][.][.] [0-9]* more.*",
-      ".*job_[0-9]*_[0-9]*.*",
+      ".*job_[0-9_]*.*",
+      ".*job_local[0-9_]*.*",
       ".*USING 'java -cp.*",
       "^Deleted.*",
   };

Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rcfile_default_format.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rcfile_default_format.q?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rcfile_default_format.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rcfile_default_format.q Mon May 27 05:27:15 2013
@@ -1,19 +1,31 @@
 SET hive.default.fileformat = RCFile;
 CREATE TABLE rcfile_default_format (key STRING);
-DESCRIBE EXTENDED rcfile_default_format;
+DESCRIBE FORMATTED rcfile_default_format;

 CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src;
-DESCRIBE EXTENDED rcfile_default_format_ctas;
+DESCRIBE FORMATTED rcfile_default_format_ctas;

 CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE;
 INSERT OVERWRITE TABLE rcfile_default_format_txtfile SELECT key from src;
-DESCRIBE EXTENDED rcfile_default_format_txtfile;
+DESCRIBE FORMATTED rcfile_default_format_txtfile;

 SET hive.default.fileformat = TextFile;
 CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas;
-DESCRIBE EXTENDED textfile_default_format_ctas;
+DESCRIBE FORMATTED textfile_default_format_ctas;
+
+SET hive.default.fileformat = RCFile;
+SET hive.default.rcfile.serde = org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
+CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas;
+DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde;
+
+CREATE TABLE rcfile_default_format_default_serde (key STRING);
+DESCRIBE FORMATTED rcfile_default_format_default_serde;
+
+SET hive.default.fileformat = TextFile;
+CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas;
+DESCRIBE FORMATTED rcfile_ctas_default_serde;
+
+CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile;
+DESCRIBE FORMATTED rcfile_default_serde;
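
The broadened QTestUtil masks now also cover local-mode job IDs, which is why the "Ended Job = job_local_0001 with errors" line drops out of local_mapred_error_cache.q.out below. A quick self-contained check of how the three patterns behave against that line (String.matches requires a full match; [0-9_]* may match empty):

public class QTestMaskSketch {
  public static void main(String[] args) {
    String line = "Ended Job = job_local_0001 with errors";
    System.out.println(line.matches(".*job_[0-9]*_[0-9]*.*")); // false: "local" is not digits
    System.out.println(line.matches(".*job_[0-9_]*.*"));       // true: empty [0-9_]* is allowed
    System.out.println(line.matches(".*job_local[0-9_]*.*"));  // true: explicit local-mode form
  }
}
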
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out Mon May 27 05:27:15 2013
@@ -19,6 +19,5 @@ org.apache.hadoop.hive.ql.metadata.HiveE
 #### A masked pattern was here ####
 org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
 #### A masked pattern was here ####
-Ended Job = job_local_0001 with errors
 Error during job, obtaining debugging information...
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/rcfile_default_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/rcfile_default_format.q.out?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/rcfile_default_format.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/rcfile_default_format.q.out Mon May 27 05:27:15 2013
@@ -3,13 +3,34 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE rcfile_default_format (key STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@rcfile_default_format
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format
 POSTHOOK: type: DESCTABLE
+# col_name	data_type	comment
+
 key	string	None
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
 #### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
 PREHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
@@ -17,14 +38,40 @@ POSTHOOK: query: CREATE TABLE rcfile_def
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@rcfile_default_format_ctas
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_ctas
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_ctas
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas
 POSTHOOK: type: DESCTABLE
+# col_name	data_type	comment
+
 key	string	None
 value	string	None
+
+# Detailed Table Information
+Database:	default
 #### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+	numFiles	1
+	numPartitions	0
+	numRows	500
+	rawDataSize	4812
+	totalSize	5293
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
 PREHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
@@ -39,14 +86,40 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@rcfile_default_format_txtfile
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_txtfile
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_txtfile
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
 key	string	None
+
+# Detailed Table Information
+Database:	default
 #### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+	numFiles	1
+	numPartitions	0
+	numRows	500
+	rawDataSize	1406
+	totalSize	1906
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
 PREHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_default_format_ctas
@@ -55,12 +128,194 @@ POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_default_format_ctas
 POSTHOOK: Output: default@textfile_default_format_ctas
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
+PREHOOK: query: DESCRIBE FORMATTED textfile_default_format_ctas
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
+POSTHOOK: query: DESCRIBE FORMATTED textfile_default_format_ctas
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
 key	string	None
 value	string	None
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+	numFiles	1
+	numPartitions	0
+	numRows	500
+	rawDataSize	5312
+	totalSize	5812
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: default@rcfile_default_format_ctas_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
+key	string	from deserializer
+value	string	from deserializer
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+	numFiles	1
+	numPartitions	0
+	numRows	500
+	rawDataSize	4812
+	totalSize	5293
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: CREATE TABLE rcfile_default_format_default_serde (key STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE rcfile_default_format_default_serde (key STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@rcfile_default_format_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
+key	string	from deserializer
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: default@rcfile_ctas_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_ctas_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_ctas_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
+key	string	from deserializer
+value	string	from deserializer
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+	numFiles	1
+	numPartitions	0
+	numRows	500
+	rawDataSize	4812
+	totalSize	5293
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
+PREHOOK: query: CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@rcfile_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name	data_type	comment
+
+key	string	from deserializer
+
+# Detailed Table Information
+Database:	default
+#### A masked pattern was here ####
+Protect Mode:	None
+Retention:	0
+#### A masked pattern was here ####
+Table Type:	MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
+InputFormat:	org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat:	org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed:	No
+Num Buckets:	-1
+Bucket Columns:	[]
+Sort Columns:	[]
+Storage Desc Params:
+	serialization.format	1
Modified: hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java (original)
+++ hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java Mon May 27 05:27:15 2013
@@ -54,12 +54,5 @@ public class TUGIContainingProcessor imp
     } catch (IOException ioe) {
       throw new RuntimeException(ioe); // unexpected!
     }
-    finally {
-      // cleanup the filesystem handles at the end if they are cached
-      // clientUgi will be null if createRemoteUser() fails
-      if (clientUgi != null && !isFsCacheDisabled) {
-        shim.closeAllForUGI(clientUgi);
-      }
-    }
   }
 }

Modified: hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1486517&r1=1486516&r2=1486517&view=diff
==============================================================================
--- hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original)
+++ hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Mon May 27 05:27:15 2013
@@ -18,13 +18,19 @@

 package org.apache.hive.service.cli.session;

+import java.io.File;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -64,6 +70,8 @@ public class HiveSessionImpl implements
   private static final String FETCH_WORK_SERDE_CLASS =
       "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";

+  private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class);
+
   private SessionManager sessionManager;
   private OperationManager operationManager;
@@ -79,7 +87,9 @@ public class HiveSessionImpl implements
         hiveConf.set(entry.getKey(), entry.getValue());
       }
     }
-
+    // set an explicit session name to control the download directory name
+    hiveConf.set(ConfVars.HIVESESSIONID.varname,
+        sessionHandle.getHandleIdentifier().toString());
     sessionState = new SessionState(hiveConf);
   }
@@ -300,8 +310,11 @@ public class HiveSessionImpl implements
       if (null != hiveHist) {
         hiveHist.closeStream();
       }
-    } finally {
+      sessionState.close();
+      release();
+    } catch (IOException ioe) {
       release();
+      throw new HiveSQLException("Failure to close", ioe);
     }
   }
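
HiveSessionImpl now pins HIVESESSIONID to the session handle's identifier before constructing SessionState, so the downloaded-resources directory derived from it is unique per session and safe for SessionState.close() to delete recursively when the session ends. A sketch of that cleanup under the same assumption (directory name illustrative; requires commons-io, which the patch itself imports):

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class SessionCleanupSketch {
  // Stand-in for the per-session downloaded-resources directory; in the
  // patch its path comes from HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR.
  static void closeSession(File resourceDir) throws IOException {
    if (resourceDir.exists()) {
      FileUtils.deleteDirectory(resourceDir); // recursive delete
    }
  }

  public static void main(String[] args) throws IOException {
    File dir = new File(System.getProperty("java.io.tmpdir"),
        "hive_resources_demo_session");
    dir.mkdirs();
    new File(dir, "downloaded.jar").createNewFile();
    closeSession(dir);
    System.out.println("still exists? " + dir.exists()); // false
  }
}
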
