Author: namit
Date: Mon Nov  7 17:54:48 2011
New Revision: 1198841

URL: http://svn.apache.org/viewvc?rev=1198841&view=rev
Log:
HIVE-2178 Log related Check style Comments fixes
(Chinna Rao Lalam via namit)


Modified:
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
    
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java
    
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java

Modified: 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java 
(original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java 
Mon Nov  7 17:54:48 2011
@@ -76,7 +76,7 @@ public class HiveConnection implements j
         client = new HiveServer.HiveServerHandler();
       } catch (MetaException e) {
         throw new SQLException("Error accessing Hive metastore: "
-            + e.getMessage(), "08S01");
+            + e.getMessage(), "08S01",e);
       }
     } else {
       // parse uri

Modified: 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java 
(original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java 
Mon Nov  7 17:54:48 2011
@@ -58,7 +58,7 @@ public class HiveDataSource implements D
     try {
       return new HiveConnection("", null);
     } catch (Exception ex) {
-      throw new SQLException();
+      throw new SQLException("Error in getting HiveConnection",ex);
     }
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Mon Nov  7 17:54:48 2011
@@ -34,14 +34,14 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.HashMap;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -98,6 +98,7 @@ import org.apache.hadoop.hive.ql.plan.Ad
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -131,7 +132,6 @@ import org.apache.hadoop.hive.ql.plan.Sh
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.serde.Constants;
@@ -837,7 +837,7 @@ public class DDLTask extends Task<DDLWor
     Index idx = db.getIndex(dbName, baseTableName, indexName);
 
     switch(alterIndex.getOp()) {
-      case ADDPROPS: 
+      case ADDPROPS:
         idx.getParameters().putAll(alterIndex.getProps());
         break;
       case UPDATETIMESTAMP:
@@ -1819,12 +1819,12 @@ public class DDLTask extends Task<DDLWor
       outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show partitions: " + stringifyException(e));
-      throw new HiveException(e.toString());
+      throw new HiveException(e);
     } catch (IOException e) {
       LOG.info("show partitions: " + stringifyException(e));
-      throw new HiveException(e.toString());
+      throw new HiveException(e);
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e);
     } finally {
       IOUtils.closeStream((FSDataOutputStream) outStream);
     }
@@ -2039,7 +2039,7 @@ public class DDLTask extends Task<DDLWor
       LOG.warn("show function: " + stringifyException(e));
       return 1;
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e);
     } finally {
       IOUtils.closeStream((FSDataOutputStream) outStream);
     }
@@ -2300,7 +2300,7 @@ public class DDLTask extends Task<DDLWor
       LOG.warn("describe function: " + stringifyException(e));
       return 1;
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e);
     } finally {
       IOUtils.closeStream((FSDataOutputStream) outStream);
     }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java 
Mon Nov  7 17:54:48 2011
@@ -151,7 +151,7 @@ public class Partition implements Serial
       sd.read(prot);
     } catch (TException e) {
       LOG.error("Could not create a copy of StorageDescription");
-      throw new HiveException("Could not create a copy of StorageDescription");
+      throw new HiveException("Could not create a copy of StorageDescription",e);
     }
 
     tpart.setSd(sd);

Modified: 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java
 (original)
+++ 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java
 Mon Nov  7 17:54:48 2011
@@ -76,7 +76,7 @@ public class SimpleCharStream {
         maxNextCharInd = (bufpos -= tokenBegin);
       }
     } catch (Throwable t) {
-      throw new Error(t.getMessage());
+      throw new Error("Error in ExpandBuff",t);
     }
 
     bufsize += 2048;

Modified: 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java?rev=1198841&r1=1198840&r2=1198841&view=diff
==============================================================================
--- 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
 (original)
+++ 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
 Mon Nov  7 17:54:48 2011
@@ -41,7 +41,7 @@ import org.apache.thrift.transport.TTran
 
 /**
  * An implementation of the Thrift Protocol for binary sortable records.
- * 
+ *
 * The data format: NULL: a single byte \0 NON-NULL Primitives: ALWAYS prepend a
  * single byte \1, and then: Boolean: FALSE = \1, TRUE = \2 Byte: flip the
 * sign-bit to make sure negative comes before positive Short: flip the sign-bit
@@ -55,16 +55,16 @@ import org.apache.thrift.transport.TTran
  * as Int (see above), then one key by one value, and then the next pair and so
  * on. Binary: size stored as Int (see above), then the binary data in its
  * original form
- * 
+ *
  * Note that the relative order of list/map/binary will be based on the size
  * first (and elements one by one if the sizes are equal).
- * 
+ *
 * This protocol takes an additional parameter SERIALIZATION_SORT_ORDER which is
  * a string containing only "+" and "-". The length of the string should equal
  * to the number of fields in the top-level struct for serialization. "+" means
  * the field should be sorted ascendingly, and "-" means descendingly. The sub
  * fields in the same top-level field will have the same sort order.
- * 
+ *
 * This is not thrift compliant in that it doesn't write out field ids so things
  * cannot actually be versioned.
  */
@@ -340,7 +340,7 @@ public class TBinarySortableProtocol ext
     try {
       dat = str.getBytes("UTF-8");
     } catch (UnsupportedEncodingException uex) {
-      throw new TException("JVM DOES NOT SUPPORT UTF-8: " + uex.getMessage());
+      throw new TException("JVM DOES NOT SUPPORT UTF-8: ",uex);
     }
     writeTextBytes(dat, 0, dat.length);
   }
@@ -635,7 +635,7 @@ public class TBinarySortableProtocol ext
       String r = new String(stringBytes, 0, i, "UTF-8");
       return r;
     } catch (UnsupportedEncodingException uex) {
-      throw new TException("JVM DOES NOT SUPPORT UTF-8: " + uex.getMessage());
+      throw new TException("JVM DOES NOT SUPPORT UTF-8: ",uex);
     }
   }
 


Reply via email to