Repository: hive
Updated Branches:
  refs/heads/master 2435e702d -> d729b4544


HIVE-14146: Column comments with "\n" character "corrupts" table metadata 
(Peter Vary, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d729b454
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d729b454
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d729b454

Branch: refs/heads/master
Commit: d729b45446dd4eef5a06c71ee4c4be9ee37070d9
Parents: 2435e70
Author: Aihua Xu <aihu...@apache.org>
Authored: Mon Oct 10 10:16:43 2016 -0400
Committer: Aihua Xu <aihu...@apache.org>
Committed: Mon Oct 10 10:16:43 2016 -0400

----------------------------------------------------------------------
 .../hive/common/util/HiveStringUtils.java       |  23 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  41 ++--
 .../formatting/MetaDataFormatUtils.java         | 156 +++++++++++---
 .../formatting/TextMetaDataFormatter.java       |   5 +-
 .../queries/clientpositive/escape_comments.q    |  20 ++
 .../alter_view_as_select_with_partition.q.out   |  10 +-
 .../alter_table_invalidate_column_stats.q.out   |  24 +--
 .../clientpositive/alter_view_as_select.q.out   |  14 +-
 .../columnstats_part_coltype.q.out              |  16 +-
 .../results/clientpositive/create_like.q.out    |  46 +++-
 .../results/clientpositive/create_view.q.out    |  72 +++----
 .../create_view_partitioned.q.out               |  20 +-
 .../clientpositive/create_view_translate.q.out  |   8 +-
 .../create_with_constraints.q.out               |  36 ++--
 .../describe_comment_indent.q.out               |  15 +-
 .../clientpositive/escape_comments.q.out        | 213 +++++++++++++++++++
 16 files changed, 558 insertions(+), 161 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java 
b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index 72c3fa9..507e369 100644
--- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -77,6 +77,15 @@ public class HiveStringUtils {
       }).with(
         new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE()));
 
+  private static final CharSequenceTranslator ESCAPE_HIVE_COMMAND =
+      new LookupTranslator(
+        new String[][] {
+          {"'", "\\'"},
+          {";", "\\;"},
+          {"\\", "\\\\"},
+      }).with(
+        new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE()));
+
   /**
    * Maintain a String pool to reduce memory.
    */
@@ -622,7 +631,19 @@ public class HiveStringUtils {
    */
   public static String escapeJava(String str) {
     return ESCAPE_JAVA.translate(str);
-}
+  }
+
+  /**
+   * Escape non-unicode characters, and ' and ; as well.
+   * Unlike StringEscapeUtils.escapeJava(), unicode characters are not
+   * escaped, since in some cases that is not desired.
+   *
+   * @param str Original string
+   * @return Escaped string
+   */
+  public static String escapeHiveCommand(String str) {
+    return ESCAPE_HIVE_COMMAND.translate(str);
+  }
 
   /**
    * Unescape commas in the string using the default escape char

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index cea8ce8..362ea32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -50,7 +50,6 @@ import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -232,6 +231,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hive.common.util.AnnotationUtils;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.ReflectionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -2052,7 +2052,8 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
     createDb_str.append("CREATE DATABASE 
`").append(database.getName()).append("`\n");
     if (database.getDescription() != null) {
       createDb_str.append("COMMENT\n  '");
-      
createDb_str.append(escapeHiveCommand(database.getDescription())).append("'\n");
+      createDb_str.append(
+          
HiveStringUtils.escapeHiveCommand(database.getDescription())).append("'\n");
     }
     createDb_str.append("LOCATION\n  '");
     createDb_str.append(database.getLocationUri()).append("'\n");
@@ -2150,7 +2151,8 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
       for (FieldSchema col : cols) {
         String columnDesc = "  `" + col.getName() + "` " + col.getType();
         if (col.getComment() != null) {
-          columnDesc = columnDesc + " COMMENT '" + 
escapeHiveCommand(col.getComment()) + "'";
+          columnDesc = columnDesc + " COMMENT '"
+              + HiveStringUtils.escapeHiveCommand(col.getComment()) + "'";
         }
         columns.add(columnDesc);
       }
@@ -2161,7 +2163,8 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
       String tabComment = tbl.getProperty("comment");
       if (tabComment != null) {
         duplicateProps.add("comment");
-        tbl_comment = "COMMENT '" + escapeHiveCommand(tabComment) + "'";
+        tbl_comment = "COMMENT '"
+            + HiveStringUtils.escapeHiveCommand(tabComment) + "'";
       }
 
       // Partitions
@@ -2173,8 +2176,8 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
         for (FieldSchema partKey : partKeys) {
           String partColDesc = "  `" + partKey.getName() + "` " + 
partKey.getType();
           if (partKey.getComment() != null) {
-            partColDesc = partColDesc + " COMMENT '" +
-                escapeHiveCommand(partKey.getComment()) + "'";
+            partColDesc = partColDesc + " COMMENT '"
+                + HiveStringUtils.escapeHiveCommand(partKey.getComment()) + 
"'";
           }
           partCols.add(partColDesc);
         }
@@ -2217,7 +2220,8 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
       SerDeInfo serdeInfo = sd.getSerdeInfo();
       Map<String, String> serdeParams = serdeInfo.getParameters();
       tbl_row_format.append("ROW FORMAT SERDE \n");
-      tbl_row_format.append("  '" + 
escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
+      tbl_row_format.append("  '"
+          + HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) 
+ "' \n");
       if (tbl.getStorageHandler() == null) {
         // If serialization.format property has the default value, it will not 
to be included in
         // SERDE properties
@@ -2228,20 +2232,21 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
         if (!serdeParams.isEmpty()) {
           appendSerdeParams(tbl_row_format, serdeParams).append(" \n");
         }
-        tbl_row_format.append("STORED AS INPUTFORMAT \n  '" +
-            escapeHiveCommand(sd.getInputFormat()) + "' \n");
-        tbl_row_format.append("OUTPUTFORMAT \n  '" +
-            escapeHiveCommand(sd.getOutputFormat()) + "'");
+        tbl_row_format.append("STORED AS INPUTFORMAT \n  '"
+            + HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n");
+        tbl_row_format.append("OUTPUTFORMAT \n  '"
+            + HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
       } else {
         duplicateProps.add(META_TABLE_STORAGE);
-        tbl_row_format.append("STORED BY \n  '" + 
escapeHiveCommand(tbl.getParameters().get(
+        tbl_row_format.append("STORED BY \n  '"
+            + HiveStringUtils.escapeHiveCommand(tbl.getParameters().get(
             META_TABLE_STORAGE)) + "' \n");
         // SerDe Properties
         if (!serdeParams.isEmpty()) {
           appendSerdeParams(tbl_row_format, serdeInfo.getParameters());
         }
       }
-      String tbl_location = "  '" + escapeHiveCommand(sd.getLocation()) + "'";
+      String tbl_location = "  '" + 
HiveStringUtils.escapeHiveCommand(sd.getLocation()) + "'";
 
       // Table properties
       
duplicateProps.addAll(Arrays.asList(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
@@ -2277,7 +2282,7 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
       for (String key : properties.keySet()) {
         if (properties.get(key) != null && (exclude == null || 
!exclude.contains(key))) {
           realProps.add("  '" + key + "'='" +
-              
escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))) + "'");
+              HiveStringUtils.escapeHiveCommand(properties.get(key)) + "'");
         }
       }
       prop_string += StringUtils.join(realProps, ", \n");
@@ -2291,7 +2296,7 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
     List<String> serdeCols = new ArrayList<String>();
     for (Entry<String, String> entry : serdeParam.entrySet()) {
       serdeCols.add("  '" + entry.getKey() + "'='"
-          + escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())) 
+ "'");
+          + HiveStringUtils.escapeHiveCommand(entry.getValue()) + "'");
     }
     builder.append(StringUtils.join(serdeCols, ", \n")).append(')');
     return builder;
@@ -2318,6 +2323,10 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
 
     indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);
 
+    // In case the query is served by HiveServer2, don't pad it with spaces,
+    // as HiveServer2 output is consumed by JDBC/ODBC clients.
+    boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
+
     // write the results in the file
     DataOutputStream outStream = getOutputStream(showIndexes.getResFile());
     try {
@@ -2330,7 +2339,7 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
 
       for (Index index : indexes)
       {
-        
outStream.write(MetaDataFormatUtils.getAllColumnsInformation(index).getBytes(StandardCharsets.UTF_8));
+        outStream.write(MetaDataFormatUtils.getIndexInformation(index, 
isOutputPadded).getBytes(StandardCharsets.UTF_8));
       }
     } catch (FileNotFoundException e) {
       LOG.info("show indexes: " + stringifyException(e));

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index ba4f6a7..c850e43 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -111,7 +111,6 @@ public final class MetaDataFormatUtils {
    * @param printHeader - if header should be included
    * @param isOutputPadded - make it more human readable by setting indentation
    *        with spaces. Turned off for use by HiveServer2
-   * @param showParColsSep - show partition column separator
    * @return string with formatted column information
    */
   public static String getAllColumnsInformation(List<FieldSchema> cols,
@@ -233,11 +232,11 @@ public final class MetaDataFormatUtils {
         appendColumnStatsNoFormatting(colBuffer, "", "", "", "", "", "", "", 
"");
       }
     }
-    colBuffer.append(comment == null ? "" : comment);
+    colBuffer.append(comment == null ? "" : 
HiveStringUtils.escapeJava(comment));
     colBuffer.append(LINE_DELIM);
   }
 
-  public static String getAllColumnsInformation(Index index) {
+  public static String getIndexInformation(Index index, boolean 
isOutputPadded) {
     StringBuilder indexInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     List<String> indexColumns = new ArrayList<String>();
@@ -268,9 +267,10 @@ public final class MetaDataFormatUtils {
     IndexType indexType = HiveIndex.getIndexTypeByClassName(indexHandlerClass);
     indexColumns.add(indexType.getName());
 
-    indexColumns.add(index.getParameters().get("comment"));
+    String comment = index.getParameters().get("comment");
+    indexColumns.add(comment == null ? null : 
HiveStringUtils.escapeJava(comment));
 
-    formatOutput(indexColumns.toArray(new String[0]), indexInfo);
+    formatOutput(indexColumns.toArray(new String[0]), indexInfo, 
isOutputPadded);
 
     return indexInfo.toString();
   }
@@ -354,12 +354,12 @@ public final class MetaDataFormatUtils {
     return tableInfo.toString();
   }
 
-  public static String getTableInformation(Table table) {
+  public static String getTableInformation(Table table, boolean 
isOutputPadded) {
     StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     // Table Metadata
     tableInfo.append(LINE_DELIM).append("# Detailed Table 
Information").append(LINE_DELIM);
-    getTableMetaDataInformation(tableInfo, table);
+    getTableMetaDataInformation(tableInfo, table, isOutputPadded);
 
     // Storage information.
     tableInfo.append(LINE_DELIM).append("# Storage 
Information").append(LINE_DELIM);
@@ -428,7 +428,8 @@ public final class MetaDataFormatUtils {
     }
   }
 
-  private static void getTableMetaDataInformation(StringBuilder tableInfo, 
Table  tbl) {
+  private static void getTableMetaDataInformation(StringBuilder tableInfo, 
Table  tbl,
+      boolean isOutputPadded) {
     formatOutput("Database:", tbl.getDbName(), tableInfo);
     formatOutput("Owner:", tbl.getOwner(), tableInfo);
     formatOutput("CreateTime:", formatDate(tbl.getTTable().getCreateTime()), 
tableInfo);
@@ -441,7 +442,7 @@ public final class MetaDataFormatUtils {
 
     if (tbl.getParameters().size() > 0) {
       tableInfo.append("Table Parameters:").append(LINE_DELIM);
-      displayAllParameters(tbl.getParameters(), tableInfo, false);
+      displayAllParameters(tbl.getParameters(), tableInfo, false, 
isOutputPadded);
     }
   }
 
@@ -465,7 +466,7 @@ public final class MetaDataFormatUtils {
    * including unicode.
    */
   private static void displayAllParameters(Map<String, String> params, 
StringBuilder tableInfo) {
-    displayAllParameters(params, tableInfo, true);
+    displayAllParameters(params, tableInfo, true, false);
   }
 
   /**
@@ -473,15 +474,16 @@ public final class MetaDataFormatUtils {
    * including unicode if escapeUnicode is true; otherwise the characters other
    * than unicode will be escaped.
    */
-
-  private static void displayAllParameters(Map<String, String> params, 
StringBuilder tableInfo, boolean escapeUnicode) {
+  private static void displayAllParameters(Map<String, String> params, 
StringBuilder tableInfo,
+      boolean escapeUnicode, boolean isOutputPadded) {
     List<String> keys = new ArrayList<String>(params.keySet());
     Collections.sort(keys);
     for (String key : keys) {
       tableInfo.append(FIELD_DELIM); // Ensures all params are indented.
       formatOutput(key,
-          escapeUnicode ? StringEscapeUtils.escapeJava(params.get(key)) : 
HiveStringUtils.escapeJava(params.get(key)),
-          tableInfo);
+          escapeUnicode ? StringEscapeUtils.escapeJava(params.get(key))
+              : HiveStringUtils.escapeJava(params.get(key)),
+          tableInfo, isOutputPadded);
     }
   }
 
@@ -550,21 +552,74 @@ public final class MetaDataFormatUtils {
     return "UNKNOWN";
   }
 
-  private static void formatOutput(String[] fields, StringBuilder tableInfo) {
-    for (String field : fields) {
-      if (field == null) {
-        tableInfo.append(FIELD_DELIM);
-        continue;
+  /**
+   * Prints a row with the given fields into the builder
+   * The last field could be a multiline field, and the extra lines should be 
padded
+   * @param fields The fields to print
+   * @param tableInfo The target builder
+   * @param isLastLinePadded Whether the last field may be printed on
+   *                         multiple lines if it contains newlines
+   */
+  private static void formatOutput(String[] fields, StringBuilder tableInfo,
+      boolean isLastLinePadded) {
+    int[] paddings = new int[fields.length-1];
+    if (fields.length > 1) {
+      for (int i = 0; i < fields.length - 1; i++) {
+        if (fields[i] == null) {
+          tableInfo.append(FIELD_DELIM);
+          continue;
+        }
+        tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
fields[i])).append(FIELD_DELIM);
+        paddings[i] = ALIGNMENT > fields[i].length() ? ALIGNMENT : 
fields[i].length();
       }
-      tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
field)).append(FIELD_DELIM);
     }
-    tableInfo.append(LINE_DELIM);
+    if (fields.length > 0) {
+      String value = fields[fields.length-1];
+      String unescapedValue =
+          (isLastLinePadded && value != null) ? 
value.replaceAll("\\\\n|\\\\r|\\\\r\\\\n","\n")
+              :value;
+      indentMultilineValue(unescapedValue, tableInfo, paddings, false);
+    } else {
+      tableInfo.append(LINE_DELIM);
+    }
+  }
+
+  /**
+   * Prints a row with the given fields as a formatted line
+   * @param fields The fields to print
+   * @param tableInfo The target builder
+   */
+  private static void formatOutput(String[] fields, StringBuilder tableInfo) {
+    formatOutput(fields, tableInfo, false);
   }
 
-  private static void formatOutput(String name, String value,
-      StringBuilder tableInfo) {
+  /**
+   * Prints the name value pair, and if the value contains newlines, adds one
+   * more empty field before the two values (assumes the name value pair is
+   * already indented with it)
+   * @param name The field name to print
+   * @param value The value to print - might contain newlines
+   * @param tableInfo The target builder
+   */
+  private static void formatOutput(String name, String value, StringBuilder 
tableInfo) {
     tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
name)).append(FIELD_DELIM);
-    tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
value)).append(LINE_DELIM);
+    int colNameLength = ALIGNMENT > name.length() ? ALIGNMENT : name.length();
+    indentMultilineValue(value, tableInfo, new int[] {0, colNameLength}, true);
+  }
+
+  /**
+   * Prints the name value pair
+   * If the output is padded then unescape the value, so it can be printed
+   * in multiple lines.
+   * In this case it assumes the pair is already indented with a field 
delimiter
+   * @param name The field name to print
+   * @param value The value to print
+   * @param tableInfo The target builder
+   * @param isOutputPadded Should the value printed as a padded string?
+   */
+  private static void formatOutput(String name, String value, StringBuilder 
tableInfo,
+      boolean isOutputPadded) {
+    String unescapedValue =
+        (isOutputPadded && value != null) ? 
value.replaceAll("\\\\n|\\\\r|\\\\r\\\\n","\n"):value;
+    formatOutput(name, unescapedValue, tableInfo);
   }
 
   private static void formatWithIndentation(String colName, String colType, 
String colComment,
@@ -613,17 +668,51 @@ public final class MetaDataFormatUtils {
       }
     }
 
-    // comment indent processing for multi-line comments
-    // comments should be indented the same amount on each line
-    // if the first line comment starts indented by k,
-    // the following line comments should also be indented by k
-    String[] commentSegments = colComment.split("\n|\r|\r\n");
-    tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
commentSegments[0])).append(LINE_DELIM);
     int colNameLength = ALIGNMENT > colName.length() ? ALIGNMENT : 
colName.length();
     int colTypeLength = ALIGNMENT > colType.length() ? ALIGNMENT : 
colType.length();
-    for (int i = 1; i < commentSegments.length; i++) {
-      tableInfo.append(String.format("%" + colNameLength + "s" + FIELD_DELIM + 
"%"
-          + colTypeLength + "s" + FIELD_DELIM + "%s", "", "", 
commentSegments[i])).append(LINE_DELIM);
+    indentMultilineValue(colComment, tableInfo, new int[]{colNameLength, 
colTypeLength}, false);
+  }
+
+  /**
+   * Comment indent processing for multi-line values.
+   * Values should be indented the same amount on each line:
+   * if the first line comment starts indented by k,
+   * the following line comments should also be indented by k.
+   * @param value the value to write
+   * @param tableInfo the buffer to write to
+   * @param columnWidths the widths of the previous columns
+   * @param printNull print null as a string, or do not print anything
+   */
+  private static void indentMultilineValue(String value, StringBuilder 
tableInfo,
+      int[] columnWidths, boolean printNull) {
+    if (value==null) {
+      if (printNull) {
+        tableInfo.append(String.format("%-" + ALIGNMENT + "s", value));
+      }
+      tableInfo.append(LINE_DELIM);
+    } else {
+      String[] valueSegments = value.split("\n|\r|\r\n");
+      tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
valueSegments[0])).append(LINE_DELIM);
+      for (int i = 1; i < valueSegments.length; i++) {
+        printPadding(tableInfo, columnWidths);
+        tableInfo.append(String.format("%-" + ALIGNMENT + "s", 
valueSegments[i]))
+            .append(LINE_DELIM);
+      }
+    }
+  }
+
+  /**
+   * Print the right padding, with the given column widths
+   * @param tableInfo The buffer to write to
+   * @param columnWidths The column widths
+   */
+  private static void printPadding(StringBuilder tableInfo, int[] 
columnWidths) {
+    for (int columnWidth : columnWidths) {
+      if (columnWidth == 0) {
+        tableInfo.append(FIELD_DELIM);
+      } else {
+        tableInfo.append(String.format("%" + columnWidth + "s" + FIELD_DELIM, 
""));
+      }
     }
   }
 
@@ -665,6 +754,7 @@ public final class MetaDataFormatUtils {
     formatOutput(ShowIndexesDesc.getSchema().split("#")[0].split(","), 
indexCols);
     return indexCols.toString();
   }
+
   public static MetaDataFormatter getFormatter(HiveConf conf) {
     if 
("json".equals(conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, 
"text"))) {
       return new JsonMetaDataFormatter();

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 47d67b1..b990bda 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hive.common.util.HiveStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -139,7 +140,7 @@ class TextMetaDataFormatter implements MetaDataFormatter {
           if (part != null) {
             output = MetaDataFormatUtils.getPartitionInformation(part);
           } else {
-            output = MetaDataFormatUtils.getTableInformation(tbl);
+            output = MetaDataFormatUtils.getTableInformation(tbl, 
isOutputPadded);
           }
           outStream.write(output.getBytes("UTF-8"));
 
@@ -460,7 +461,7 @@ class TextMetaDataFormatter implements MetaDataFormatter {
       outStream.write(database.getBytes("UTF-8"));
       outStream.write(separator);
       if (comment != null) {
-        outStream.write(comment.getBytes("UTF-8"));
+        outStream.write(HiveStringUtils.escapeJava(comment).getBytes("UTF-8"));
       }
       outStream.write(separator);
       if (location != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/queries/clientpositive/escape_comments.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/escape_comments.q 
b/ql/src/test/queries/clientpositive/escape_comments.q
new file mode 100644
index 0000000..8c38690
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/escape_comments.q
@@ -0,0 +1,20 @@
+create database escape_comments_db comment 'a\nb';
+use escape_comments_db;
+create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb');
+create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1;
+create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with 
deferred rebuild comment 'a\nb';
+
+describe database extended escape_comments_db;
+describe database escape_comments_db;
+show create table escape_comments_tbl1;
+describe formatted escape_comments_tbl1;
+describe pretty escape_comments_tbl1;
+describe escape_comments_tbl1;
+show create table escape_comments_view1;
+describe formatted escape_comments_view1;
+show formatted index on escape_comments_tbl1;
+
+drop database escape_comments_db cascade;

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out 
b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
index 9b84227..4e43819 100644
--- 
a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
+++ 
b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
@@ -63,11 +63,11 @@ Bucket Columns:             []
 Sort Columns:          []                       
                 
 # View Information              
-View Original Text:    SELECT key, value        
-FROM src                
-WHERE key=86            
+View Original Text:    SELECT key, value        
+                               FROM src            
+                               WHERE key=86        
 View Expanded Text:    SELECT `src`.`key`, `src`.`value`        
-FROM `default`.`src`            
-WHERE `src`.`key`=86            
+                               FROM `default`.`src`
+                               WHERE `src`.`key`=86
 FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE 
VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions
  The following view has partition, it could not be replaced: 
default.testViewPart

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/alter_table_invalidate_column_stats.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/alter_table_invalidate_column_stats.q.out 
b/ql/src/test/results/clientpositive/alter_table_invalidate_column_stats.q.out
index f3c10ee..85d7dc4 100644
--- 
a/ql/src/test/results/clientpositive/alter_table_invalidate_column_stats.q.out
+++ 
b/ql/src/test/results/clientpositive/alter_table_invalidate_column_stats.q.out
@@ -296,7 +296,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col1                   int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col2
@@ -305,7 +305,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col2
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col2                   string                  from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col3
@@ -314,7 +314,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col3
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col3                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 replace columns (col1 int, col2 
string, col4 string) cascade
@@ -353,7 +353,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col4                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 change column col1 col1 string
@@ -382,7 +382,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col1                   string                  from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col2
@@ -400,7 +400,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col4                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 rename to statsdb2.testpart2
@@ -762,7 +762,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col1                   int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col2
@@ -771,7 +771,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col2
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col2                   string                  from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col3
@@ -780,7 +780,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part3') col3
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col3                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 replace columns (col1 int, col2 
string, col4 string) cascade
@@ -819,7 +819,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col4                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 change column col1 col1 string
@@ -848,7 +848,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col1                   string                  from deserializer               
                                                 
 PREHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col2
@@ -866,7 +866,7 @@ PREHOOK: Input: statsdb1@testpart1
 POSTHOOK: query: describe formatted statsdb1.testpart1 partition (part = 
'part1') col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testpart1
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 col4                   string                  from deserializer               
                                                 
 PREHOOK: query: alter table statsdb1.testpart1 rename to statsdb2.testpart2

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/alter_view_as_select.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_view_as_select.q.out 
b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
index 2d82395..dc1814e 100644
--- a/ql/src/test/results/clientpositive/alter_view_as_select.q.out
+++ b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
@@ -134,14 +134,14 @@ Bucket Columns:           []
 Sort Columns:          []                       
                 
 # View Information              
-View Original Text:    SELECT * FROM src        
-WHERE key > 80 AND key < 100            
-ORDER BY key, value             
-LIMIT 10                
+View Original Text:    SELECT * FROM src        
+                               WHERE key > 80 AND key < 100
+                               ORDER BY key, value 
+                               LIMIT 10            
 View Expanded Text:    SELECT `src`.`key`, `src`.`value` FROM `default`.`src`  
 
-WHERE `src`.`key` > 80 AND `src`.`key` < 100            
-ORDER BY `src`.`key`, `src`.`value`             
-LIMIT 10                
+                               WHERE `src`.`key` > 80 AND `src`.`key` < 100
+                               ORDER BY `src`.`key`, `src`.`value`
+                               LIMIT 10            
 PREHOOK: query: DROP VIEW tv.testView
 PREHOOK: type: DROPVIEW
 PREHOOK: Input: tv@testview

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out 
b/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
index 84232b6..d52f020 100644
--- a/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
@@ -96,7 +96,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 key                    int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') value
@@ -105,7 +105,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 value                  string                  from deserializer               
                                                 
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part) compute statistics for columns
@@ -144,7 +144,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 key                    int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') value
@@ -153,7 +153,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 value                  string                  from deserializer               
                                                 
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr, part) compute statistics for columns
@@ -194,7 +194,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 key                    int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') value
@@ -203,7 +203,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 value                  string                  from deserializer               
                                                 
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') key
@@ -212,7 +212,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 key                    int                     from deserializer               
                                                 
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') value
@@ -221,7 +221,7 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                 
+# col_name             data_type               comment                         
                                                 
                                                                                
 
 value                  string                  from deserializer               
                                                 
 PREHOOK: query: analyze table partcolstats partition (ds, hr, part) compute 
statistics for columns

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like.q.out 
b/ql/src/test/results/clientpositive/create_like.q.out
index 0111c94..58d9879 100644
--- a/ql/src/test/results/clientpositive/create_like.q.out
+++ b/ql/src/test/results/clientpositive/create_like.q.out
@@ -354,7 +354,28 @@ Retention:                 0
 #### A masked pattern was here ####
 Table Type:            MANAGED_TABLE            
 Table Parameters:               
-       avro.schema.literal     {\n  \"namespace\": 
\"testing.hive.avro.serde\",\n  \"name\": \"doctors\",\n  \"type\": 
\"record\",\n  \"fields\": [\n    {\n      \"name\":\"number\",\n      
\"type\":\"int\",\n      \"doc\":\"Order of playing the role\"\n    },\n    {\n 
     \"name\":\"first_name\",\n      \"type\":\"string\",\n      
\"doc\":\"first name of actor playing role\"\n    },\n    {\n      
\"name\":\"last_name\",\n      \"type\":\"string\",\n      \"doc\":\"last name 
of actor playing role\"\n    }\n  ]\n}
+       avro.schema.literal     {                   
+                                 \"namespace\": \"testing.hive.avro.serde\",
+                                 \"name\": \"doctors\",
+                                 \"type\": \"record\",
+                                 \"fields\": [     
+                                   {               
+                                     \"name\":\"number\",
+                                     \"type\":\"int\",
+                                     \"doc\":\"Order of playing the role\"
+                                   },              
+                                   {               
+                                     \"name\":\"first_name\",
+                                     \"type\":\"string\",
+                                     \"doc\":\"first name of actor playing 
role\"
+                                   },              
+                                   {               
+                                     \"name\":\"last_name\",
+                                     \"type\":\"string\",
+                                     \"doc\":\"last name of actor playing 
role\"
+                                   }               
+                                 ]                 
+                               }                   
        k1                      v1                  
        k2                      v2                  
 #### A masked pattern was here ####
@@ -402,7 +423,28 @@ Retention:                 0
 Table Type:            MANAGED_TABLE            
 Table Parameters:               
        COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       avro.schema.literal     {\n  \"namespace\": 
\"testing.hive.avro.serde\",\n  \"name\": \"doctors\",\n  \"type\": 
\"record\",\n  \"fields\": [\n    {\n      \"name\":\"number\",\n      
\"type\":\"int\",\n      \"doc\":\"Order of playing the role\"\n    },\n    {\n 
     \"name\":\"first_name\",\n      \"type\":\"string\",\n      
\"doc\":\"first name of actor playing role\"\n    },\n    {\n      
\"name\":\"last_name\",\n      \"type\":\"string\",\n      \"doc\":\"last name 
of actor playing role\"\n    }\n  ]\n}
+       avro.schema.literal     {                   
+                                 \"namespace\": \"testing.hive.avro.serde\",
+                                 \"name\": \"doctors\",
+                                 \"type\": \"record\",
+                                 \"fields\": [     
+                                   {               
+                                     \"name\":\"number\",
+                                     \"type\":\"int\",
+                                     \"doc\":\"Order of playing the role\"
+                                   },              
+                                   {               
+                                     \"name\":\"first_name\",
+                                     \"type\":\"string\",
+                                     \"doc\":\"first name of actor playing 
role\"
+                                   },              
+                                   {               
+                                     \"name\":\"last_name\",
+                                     \"type\":\"string\",
+                                     \"doc\":\"last name of actor playing 
role\"
+                                   }               
+                                 ]                 
+                               }                   
        numFiles                0                   
        numRows                 0                   
        rawDataSize             0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/create_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view.q.out 
b/ql/src/test/results/clientpositive/create_view.q.out
index 7281185..12457b4 100644
--- a/ql/src/test/results/clientpositive/create_view.q.out
+++ b/ql/src/test/results/clientpositive/create_view.q.out
@@ -775,9 +775,9 @@ Sort Columns:               []
                 
 # View Information              
 View Original Text:    SELECT test_translate('abc', 'a', 'b')   
-FROM table1             
+                               FROM table1         
 View Expanded Text:    SELECT `_c0` AS `c` FROM (SELECT 
`test_translate`('abc', 'a', 'b')       
-FROM `default`.`table1`) `default.view8`                
+                               FROM `default`.`table1`) `default.view8`
 PREHOOK: query: SELECT * FROM view8
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
@@ -855,9 +855,9 @@ Sort Columns:               []
                 
 # View Information              
 View Original Text:    SELECT test_max(length(value))   
-FROM src                
+                               FROM src            
 View Expanded Text:    SELECT `_c0` AS `m` FROM (SELECT 
`test_max`(length(`src`.`value`))       
-FROM `default`.`src`) `default.view9`           
+                               FROM `default`.`src`) `default.view9`
 PREHOOK: query: SELECT * FROM view9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -933,9 +933,9 @@ Sort Columns:               []
                 
 # View Information              
 View Original Text:    SELECT test_max(length(value))   
-FROM src                
+                               FROM src            
 View Expanded Text:    SELECT `_c0` AS `m` FROM (SELECT 
`test_max`(length(`src`.`value`))       
-FROM `default`.`src`) `default.view9`           
+                               FROM `default`.`src`) `default.view9`
 PREHOOK: query: SELECT * FROM view9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -1077,9 +1077,9 @@ Sort Columns:             []
                 
 # View Information              
 View Original Text:    SELECT test_explode(array(1,2,3)) AS (boom)      
-FROM table1             
+                               FROM table1         
 View Expanded Text:    SELECT `test_explode`(array(1,2,3)) AS (`boom`)  
-FROM `default`.`table1`                 
+                               FROM `default`.`table1`
 PREHOOK: query: SELECT * FROM view11
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
@@ -1233,10 +1233,10 @@ Bucket Columns:         []
 Sort Columns:          []                       
                 
 # View Information              
-View Original Text:    SELECT s.key     
-FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s                 
-View Expanded Text:    SELECT `s`.`key`         
-FROM `default`.`srcbucket` TABLESAMPLE (BUCKET 1 OUT OF 5 ON `key`) `s`        
         
+View Original Text:    SELECT s.key             
+                               FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 
ON key) s
+View Expanded Text:    SELECT `s`.`key`         
+                               FROM `default`.`srcbucket` TABLESAMPLE (BUCKET 
1 OUT OF 5 ON `key`) `s`
 PREHOOK: query: SELECT * FROM view13
 ORDER BY key LIMIT 12
 PREHOOK: type: QUERY
@@ -1355,25 +1355,25 @@ Sort Columns:           []
                 
 # View Information              
 View Original Text:    SELECT unionsrc1.key as k1, unionsrc1.value as v1,      
 
-       unionsrc2.key as k2, unionsrc2.value as v2               
-FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1      
         
-                         UNION  ALL             
-      select s2.key as key, s2.value as value from src s2 where s2.key < 10) 
unionsrc1          
-JOIN            
-     (select 'tst1' as key, cast(count(1) as string) as value from src s3      
         
-                         UNION  ALL             
-      select s4.key as key, s4.value as value from src s4 where s4.key < 10) 
unionsrc2          
-ON (unionsrc1.key = unionsrc2.key)              
+                                      unionsrc2.key as k2, unionsrc2.value as 
v2
+                               FROM (select 'tst1' as key, cast(count(1) as 
string) as value from src s1
+                                                        UNION  ALL
+                                     select s2.key as key, s2.value as value 
from src s2 where s2.key < 10) unionsrc1
+                               JOIN                
+                                    (select 'tst1' as key, cast(count(1) as 
string) as value from src s3
+                                                        UNION  ALL
+                                     select s4.key as key, s4.value as value 
from src s4 where s4.key < 10) unionsrc2
+                               ON (unionsrc1.key = unionsrc2.key)
 View Expanded Text:    SELECT `unionsrc1`.`key` as `k1`, `unionsrc1`.`value` 
as `v1`,   
-       `unionsrc2`.`key` as `k2`, `unionsrc2`.`value` as `v2`           
-FROM (select 'tst1' as `key`, cast(count(1) as string) as `value` from 
`default`.`src` `s1`             
-                         UNION  ALL             
-      select `s2`.`key` as `key`, `s2`.`value` as `value` from `default`.`src` 
`s2` where `s2`.`key` < 10) `unionsrc1`          
-JOIN            
-     (select 'tst1' as `key`, cast(count(1) as string) as `value` from 
`default`.`src` `s3`             
-                         UNION  ALL             
-      select `s4`.`key` as `key`, `s4`.`value` as `value` from `default`.`src` 
`s4` where `s4`.`key` < 10) `unionsrc2`          
-ON (`unionsrc1`.`key` = `unionsrc2`.`key`)              
+                                      `unionsrc2`.`key` as `k2`, 
`unionsrc2`.`value` as `v2`
+                               FROM (select 'tst1' as `key`, cast(count(1) as 
string) as `value` from `default`.`src` `s1`
+                                                        UNION  ALL
+                                     select `s2`.`key` as `key`, `s2`.`value` 
as `value` from `default`.`src` `s2` where `s2`.`key` < 10) `unionsrc1`
+                               JOIN                
+                                    (select 'tst1' as `key`, cast(count(1) as 
string) as `value` from `default`.`src` `s3`
+                                                        UNION  ALL
+                                     select `s4`.`key` as `key`, `s4`.`value` 
as `value` from `default`.`src` `s4` where `s4`.`key` < 10) `unionsrc2`
+                               ON (`unionsrc1`.`key` = `unionsrc2`.`key`)
 PREHOOK: query: SELECT * FROM view14
 ORDER BY k1
 PREHOOK: type: QUERY
@@ -1471,11 +1471,11 @@ Sort Columns:           []
                 
 # View Information              
 View Original Text:    SELECT key,COUNT(value) AS value_count   
-FROM src                
-GROUP BY key            
+                               FROM src            
+                               GROUP BY key        
 View Expanded Text:    SELECT `src`.`key`,COUNT(`src`.`value`) AS 
`value_count`         
-FROM `default`.`src`            
-GROUP BY `src`.`key`            
+                               FROM `default`.`src`
+                               GROUP BY `src`.`key`
 PREHOOK: query: SELECT * FROM view15
 ORDER BY value_count DESC, key
 LIMIT 10
@@ -1556,9 +1556,9 @@ Sort Columns:             []
                 
 # View Information              
 View Original Text:    SELECT DISTINCT value    
-FROM src                
+                               FROM src            
 View Expanded Text:    SELECT DISTINCT `src`.`value`    
-FROM `default`.`src`            
+                               FROM `default`.`src`
 PREHOOK: query: SELECT * FROM view16
 ORDER BY value
 LIMIT 10

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/create_view_partitioned.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view_partitioned.q.out 
b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
index 15d777a..4373303 100644
--- a/ql/src/test/results/clientpositive/create_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
@@ -86,12 +86,12 @@ Bucket Columns:             []
 Sort Columns:          []                       
                 
 # View Information              
-View Original Text:    SELECT key, value        
-FROM src                
-WHERE key=86            
+View Original Text:    SELECT key, value        
+                               FROM src            
+                               WHERE key=86        
 View Expanded Text:    SELECT `src`.`key`, `src`.`value`        
-FROM `default`.`src`            
-WHERE `src`.`key`=86            
+                               FROM `default`.`src`
+                               WHERE `src`.`key`=86
 PREHOOK: query: SELECT * FROM vp1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -406,12 +406,12 @@ Bucket Columns:           []
 Sort Columns:          []                       
                 
 # View Information              
-View Original Text:    SELECT key, value        
-FROM src                
-WHERE key=86            
+View Original Text:    SELECT key, value        
+                               FROM src            
+                               WHERE key=86        
 View Expanded Text:    SELECT `key` AS `k`, `value` AS `v` FROM (SELECT 
`src`.`key`, `src`.`value`      
-FROM `default`.`src`            
-WHERE `src`.`key`=86) `default.vp3`             
+                               FROM `default`.`src`
+                               WHERE `src`.`key`=86) `default.vp3`
 PREHOOK: query: ALTER VIEW vp3
 ADD PARTITION (v='val_86')
 PREHOOK: type: ALTERTABLE_ADDPARTS

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/create_view_translate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view_translate.q.out 
b/ql/src/test/results/clientpositive/create_view_translate.q.out
index 2789f8f..43b9062 100644
--- a/ql/src/test/results/clientpositive/create_view_translate.q.out
+++ b/ql/src/test/results/clientpositive/create_view_translate.q.out
@@ -90,11 +90,11 @@ Sort Columns:               []
                 
 # View Information              
 View Original Text:    select key, value from (         
-  select key, value from src            
-) a             
+                                 select key, value from src
+                               ) a                 
 View Expanded Text:    select `a`.`key`, `a`.`value` from (     
-  select `src`.`key`, `src`.`value` from `default`.`src`                
-) `a`           
+                                 select `src`.`key`, `src`.`value` from 
`default`.`src`
+                               ) `a`               
 PREHOOK: query: drop view v
 PREHOOK: type: DROPVIEW
 PREHOOK: Input: default@v

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/create_with_constraints.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_with_constraints.q.out 
b/ql/src/test/results/clientpositive/create_with_constraints.q.out
index b040e43..056ca38 100644
--- a/ql/src/test/results/clientpositive/create_with_constraints.q.out
+++ b/ql/src/test/results/clientpositive/create_with_constraints.q.out
@@ -259,7 +259,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table1           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: DESCRIBE FORMATTED table2
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table2
@@ -301,7 +301,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table2           
 Constraint Name:       pk1                      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: DESCRIBE FORMATTED table3
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table3
@@ -342,7 +342,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table3           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table3           
@@ -390,7 +390,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table4           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table4           
@@ -440,7 +440,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table5           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table5           
@@ -488,7 +488,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table6           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table6           
@@ -539,7 +539,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table7           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: DESCRIBE FORMATTED table8
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table8
@@ -581,7 +581,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table8           
 Constraint Name:       pk8                      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: DESCRIBE FORMATTED table9
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table9
@@ -665,7 +665,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table10          
 Constraint Name:       pk10                     
-Column Names:          a                       
+Column Names:          a                        
                 
 # Foreign Keys          
 Table:                 default.table10          
@@ -715,7 +715,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table11          
 Constraint Name:       pk11                     
-Column Names:          a                       
+Column Names:          a                        
                 
 # Foreign Keys          
 Table:                 default.table11          
@@ -844,7 +844,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table3           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
 PREHOOK: query: DESCRIBE FORMATTED table6
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table6
@@ -886,7 +886,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table6           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
 PREHOOK: query: ALTER TABLE table2 ADD CONSTRAINT pkt2 primary key (a) disable 
novalidate
 PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
 POSTHOOK: query: ALTER TABLE table2 ADD CONSTRAINT pkt2 primary key (a) 
disable novalidate
@@ -940,7 +940,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table2           
 Constraint Name:       pkt2                     
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: DESCRIBE FORMATTED table3
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table3
@@ -981,7 +981,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table3           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table3           
@@ -1029,7 +1029,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 default.table6           
 Constraint Name:       #### A masked pattern was here ####      
-Column Names:          x                       
+Column Names:          x                        
                 
 # Foreign Keys          
 Table:                 default.table6           
@@ -1117,7 +1117,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 dbconstraint.table2      
 Constraint Name:       pk1                      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: ALTER TABLE dbconstraint.table2 DROP CONSTRAINT pk1
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
 POSTHOOK: query: ALTER TABLE dbconstraint.table2 DROP CONSTRAINT pk1
@@ -1212,7 +1212,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 dbconstraint.table2      
 Constraint Name:       pk1                      
-Column Names:          a                       
+Column Names:          a                        
 PREHOOK: query: ALTER TABLE dbconstraint.table2  ADD CONSTRAINT fkx FOREIGN 
KEY (b) REFERENCES table1(a)  DISABLE NOVALIDATE
 PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
 POSTHOOK: query: ALTER TABLE dbconstraint.table2  ADD CONSTRAINT fkx FOREIGN 
KEY (b) REFERENCES table1(a)  DISABLE NOVALIDATE
@@ -1258,7 +1258,7 @@ Storage Desc Params:
 # Primary Key           
 Table:                 dbconstraint.table2      
 Constraint Name:       pk1                      
-Column Names:          a                       
+Column Names:          a                        
                 
 # Foreign Keys          
 Table:                 dbconstraint.table2      

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/describe_comment_indent.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_comment_indent.q.out 
b/ql/src/test/results/clientpositive/describe_comment_indent.q.out
index 5a01de1..5b41fb8 100644
--- a/ql/src/test/results/clientpositive/describe_comment_indent.q.out
+++ b/ql/src/test/results/clientpositive/describe_comment_indent.q.out
@@ -34,10 +34,10 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@test_table
 col1                   int                     col1 one line comment
 col2                   string                  col2                
-                                               two lines comment
+                                               two lines comment   
 col3                   string                  col3                
-                                               three lines
-                                               comment
+                                               three lines         
+                                               comment             
 PREHOOK: query: DESCRIBE FORMATTED test_table
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test_table
@@ -48,10 +48,10 @@ POSTHOOK: Input: default@test_table
                 
 col1                   int                     col1 one line comment
 col2                   string                  col2                
-                                               two lines comment
+                                               two lines comment   
 col3                   string                  col3                
-                                               three lines
-                                               comment
+                                               three lines         
+                                               comment             
                 
 # Detailed Table Information            
 Database:              default                  
@@ -61,7 +61,8 @@ Retention:            0
 Table Type:            MANAGED_TABLE            
 Table Parameters:               
        COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       comment                 table comment\ntwo lines
+       comment                 table comment       
+                               two lines           
        numFiles                0                   
        numRows                 0                   
        rawDataSize             0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/d729b454/ql/src/test/results/clientpositive/escape_comments.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/escape_comments.q.out 
b/ql/src/test/results/clientpositive/escape_comments.q.out
new file mode 100644
index 0000000..0b8c5c5
--- /dev/null
+++ b/ql/src/test/results/clientpositive/escape_comments.q.out
@@ -0,0 +1,213 @@
+PREHOOK: query: create database escape_comments_db comment 'a\nb'
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:escape_comments_db
+POSTHOOK: query: create database escape_comments_db comment 'a\nb'
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:escape_comments_db
+PREHOOK: query: use escape_comments_db
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:escape_comments_db
+POSTHOOK: query: use escape_comments_db
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:escape_comments_db
+PREHOOK: query: create table escape_comments_tbl1
+(col1 string comment 'a\nb\';') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: create table escape_comments_tbl1
+(col1 string comment 'a\nb\';') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
+PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') 
comment 'a\nb'
+as select col1 from escape_comments_tbl1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_view1
+POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') 
comment 'a\nb'
+as select col1 from escape_comments_tbl1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_view1
+PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 
'COMPACT' with deferred rebuild comment 'a\nb'
+PREHOOK: type: CREATEINDEX
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 
'COMPACT' with deferred rebuild comment 'a\nb'
+POSTHOOK: type: CREATEINDEX
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: 
escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+PREHOOK: query: describe database extended escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+POSTHOOK: query: describe database extended escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+escape_comments_db     a\nb    location/in/test        hive_test_user  USER    
+PREHOOK: query: describe database escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+POSTHOOK: query: describe database escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+escape_comments_db     a\nb    location/in/test        hive_test_user  USER    
+PREHOOK: query: show create table escape_comments_tbl1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: show create table escape_comments_tbl1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+CREATE TABLE `escape_comments_tbl1`(
+  `col1` string COMMENT 'a\nb\'\;')
+COMMENT 'a\nb'
+PARTITIONED BY ( 
+  `p1` string COMMENT 'a\nb')
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: describe formatted escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe formatted escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+# col_name             data_type               comment             
+                
+col1                   string                  a                   
+                                               b';                 
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+p1                     string                  a                   
+                                               b                   
+                
+# Detailed Table Information            
+Database:              escape_comments_db       
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            MANAGED_TABLE            
+Table Parameters:               
+       comment                 a                   
+                               b                   
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
+InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
+OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: describe pretty escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe pretty escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+col_name       data_type       comment
+               
+col1           string          a
+                               b';
+p1             string          a
+                               b
+                
+# Partition Information                 
+col_name       data_type       comment
+               
+p1             string          a
+                               b
+PREHOOK: query: describe escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+col1                   string                  a                   
+                                               b';                 
+p1                     string                  a                   
+                                               b                   
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+p1                     string                  a                   
+                                               b                   
+PREHOOK: query: show create table escape_comments_view1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+POSTHOOK: query: show create table escape_comments_view1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select 
`escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) 
`escape_comments_db.escape_comments_view1`
+PREHOOK: query: describe formatted escape_comments_view1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+POSTHOOK: query: describe formatted escape_comments_view1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+# col_name             data_type               comment             
+                
+col1                   string                  a                   
+                                               b                   
+                
+# Detailed Table Information            
+Database:              escape_comments_db       
+#### A masked pattern was here ####
+Retention:             0                        
+Table Type:            VIRTUAL_VIEW             
+Table Parameters:               
+       comment                 a                   
+                               b                   
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         null                     
+InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
+OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+                
+# View Information              
+View Original Text:    select col1 from escape_comments_tbl1    
+View Expanded Text:    SELECT `col1` AS `col1` FROM (select 
`escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) 
`escape_comments_db.escape_comments_view1`  
+PREHOOK: query: show formatted index on escape_comments_tbl1
+PREHOOK: type: SHOWINDEXES
+POSTHOOK: query: show formatted index on escape_comments_tbl1
+POSTHOOK: type: SHOWINDEXES
+idx_name               tab_name                col_names               
idx_tab_name            idx_type                comment             
+                                        
+                                        
+index2                 escape_comments_tbl1    col1                    
escape_comments_db__escape_comments_tbl1_index2__       compact                 
a                   
+                                                                               
                                                                        b       
            
+PREHOOK: query: drop database escape_comments_db cascade
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:escape_comments_db
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: 
escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+PREHOOK: Output: escape_comments_db@escape_comments_tbl1
+PREHOOK: Output: escape_comments_db@escape_comments_view1
+POSTHOOK: query: drop database escape_comments_db cascade
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:escape_comments_db
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: 
escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: escape_comments_db@escape_comments_view1

Reply via email to