Author: navis
Date: Mon Dec 16 01:51:47 2013
New Revision: 1551103
URL: http://svn.apache.org/r1551103
Log:
HIVE-5276 : Skip redundant string encoding/decoding for hiveserver2 (Navis, reviewed by Carl Steinbach)
Added:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchFormatter.java
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Mon Dec 16
01:51:47 2013
@@ -1413,6 +1413,7 @@ public class Driver implements CommandPr
try {
SessionState.get().getHiveHistory().logPlanProgress(plan);
} catch (Exception e) {
+ // ignore
}
}
console.printInfo("OK");
@@ -1480,7 +1481,6 @@ public class Driver implements CommandPr
tskRun.runSequential();
}
running.put(tskRes, tskRun);
- return;
}
/**
@@ -1524,14 +1524,17 @@ public class Driver implements CommandPr
Thread.sleep(SLEEP_TIME);
} catch (InterruptedException ie) {
// Do Nothing
- ;
}
resultIterator = results.iterator();
}
}
- public boolean getResults(ArrayList<String> res) throws IOException, CommandNeedRetryException {
- if (plan != null && plan.getFetchTask() != null) {
+ public boolean isFetchingTable() {
+ return plan != null && plan.getFetchTask() != null;
+ }
+
+ public boolean getResults(List res) throws IOException, CommandNeedRetryException {
+ if (isFetchingTable()) {
FetchTask ft = plan.getFetchTask();
ft.setMaxRows(maxRows);
return ft.fetch(res);
Added:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java?rev=1551103&view=auto
==============================================================================
---
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
(added)
+++
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
Mon Dec 16 01:51:47 2013
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * serialize row by user specified serde and call toString() to make string type result
+ */
+public class DefaultFetchFormatter<T> implements FetchFormatter<String> {
+
+ private SerDe mSerde;
+
+ @Override
+ public void initialize(Configuration hconf, Properties props) throws HiveException {
+ try {
+ mSerde = initializeSerde(hconf, props);
+ } catch (Exception e) {
+ throw new HiveException(e);
+ }
+ }
+
+ private SerDe initializeSerde(Configuration conf, Properties props) throws Exception {
+ String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
+ Class<? extends SerDe> serdeClass = Class.forName(serdeName, true,
+ JavaUtils.getClassLoader()).asSubclass(SerDe.class);
+ // cast only needed for Hadoop 0.17 compatibility
+ SerDe serde = ReflectionUtils.newInstance(serdeClass, null);
+
+ Properties serdeProps = new Properties();
+ if (serde instanceof DelimitedJSONSerDe) {
+ serdeProps.put(SERIALIZATION_FORMAT, props.getProperty(SERIALIZATION_FORMAT));
+ serdeProps.put(SERIALIZATION_NULL_FORMAT, props.getProperty(SERIALIZATION_NULL_FORMAT));
+ }
+ serde.initialize(conf, serdeProps);
+ return serde;
+ }
+
+ @Override
+ public String convert(Object row, ObjectInspector rowOI) throws Exception {
+ return mSerde.serialize(row, rowOI).toString();
+ }
+
+ @Override
+ public void close() throws IOException {
+ }
+}
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchFormatter.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchFormatter.java?rev=1551103&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchFormatter.java
(added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchFormatter.java
Mon Dec 16 01:51:47 2013
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+/**
+ * internal-use only
+ *
+ * Used in ListSinkOperator for formatting final output
+ */
+public interface FetchFormatter<T> extends Closeable {
+
+ void initialize(Configuration hconf, Properties props) throws Exception;
+
+ T convert(Object row, ObjectInspector rowOI) throws Exception;
+
+ public static class ThriftFormatter implements FetchFormatter<Object> {
+
+ @Override
+ public void initialize(Configuration hconf, Properties props) throws Exception {
+ }
+
+ @Override
+ public Object convert(Object row, ObjectInspector rowOI) throws Exception {
+ StructObjectInspector structOI = (StructObjectInspector) rowOI;
+ List<? extends StructField> fields = structOI.getAllStructFieldRefs();
+
+ Object[] converted = new Object[fields.size()];
+ for (int i = 0 ; i < converted.length; i++) {
+ StructField fieldRef = fields.get(i);
+ Object field = structOI.getStructFieldData(row, fieldRef);
+ converted[i] = field == null ? null :
+ SerDeUtils.toThriftPayload(field, fieldRef.getFieldObjectInspector());
+ }
+ return converted;
+ }
+
+ @Override
+ public void close() throws IOException {
+ }
+ }
+}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java Mon
Dec 16 01:51:47 2013
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec;
import java.io.IOException;
import java.io.Serializable;
-import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -122,14 +121,13 @@ public class FetchTask extends Task<Fetc
this.maxRows = maxRows;
}
- @Override
- public boolean fetch(ArrayList<String> res) throws IOException, CommandNeedRetryException {
+ public boolean fetch(List res) throws IOException, CommandNeedRetryException {
sink.reset(res);
+ int rowsRet = work.getLeastNumRows();
+ if (rowsRet <= 0) {
+ rowsRet = work.getLimit() >= 0 ? Math.min(work.getLimit() - totalRows, maxRows) : maxRows;
+ }
try {
- int rowsRet = work.getLeastNumRows();
- if (rowsRet <= 0) {
- rowsRet = work.getLimit() >= 0 ? Math.min(work.getLimit() - totalRows, maxRows) : maxRows;
- }
if (rowsRet <= 0) {
fetch.clearFetchContext();
return false;
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
Mon Dec 16 01:51:47 2013
@@ -18,19 +18,15 @@
package org.apache.hadoop.hive.ql.exec;
-import java.util.ArrayList;
+import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
-import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.util.ReflectionUtils;
/**
@@ -39,45 +35,43 @@ import org.apache.hadoop.util.Reflection
*/
public class ListSinkOperator extends Operator<ListSinkDesc> {
- private transient SerDe mSerde;
+ public static final String OUTPUT_FORMATTER = "output.formatter";
- private transient ArrayList<String> res;
+ private transient List res;
+ private transient FetchFormatter fetcher;
private transient int numRows;
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
try {
- mSerde = initializeSerde(hconf);
- initializeChildren(hconf);
+ fetcher = initializeFetcher(hconf);
} catch (Exception e) {
throw new HiveException(e);
}
+ super.initializeOp(hconf);
}
- private SerDe initializeSerde(Configuration conf) throws Exception {
- String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
- Class<? extends SerDe> serdeClass = Class.forName(serdeName, true,
- JavaUtils.getClassLoader()).asSubclass(SerDe.class);
- // cast only needed for Hadoop 0.17 compatibility
- SerDe serde = ReflectionUtils.newInstance(serdeClass, null);
-
- Properties serdeProp = new Properties();
-
- // this is the default serialization format
- if (serde instanceof DelimitedJSONSerDe) {
- serdeProp.put(serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.tabCode);
- serdeProp.put(serdeConstants.SERIALIZATION_NULL_FORMAT, getConf().getSerializationNullFormat());
+ private FetchFormatter initializeFetcher(Configuration conf) throws Exception {
+ String formatterName = conf.get(OUTPUT_FORMATTER);
+ FetchFormatter fetcher;
+ if (formatterName != null && !formatterName.isEmpty()) {
+ Class<? extends FetchFormatter> fetcherClass = Class.forName(formatterName, true,
+ JavaUtils.getClassLoader()).asSubclass(FetchFormatter.class);
+ fetcher = ReflectionUtils.newInstance(fetcherClass, null);
+ } else {
+ fetcher = new DefaultFetchFormatter();
}
- serde.initialize(conf, serdeProp);
- return serde;
- }
- public ListSinkOperator initialize(SerDe mSerde) {
- this.mSerde = mSerde;
- return this;
+ // selectively used by fetch formatter
+ Properties props = new Properties();
+ props.put(serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.tabCode);
+ props.put(serdeConstants.SERIALIZATION_NULL_FORMAT, getConf().getSerializationNullFormat());
+
+ fetcher.initialize(conf, props);
+ return fetcher;
}
- public void reset(ArrayList<String> res) {
+ public void reset(List res) {
this.res = res;
this.numRows = 0;
}
@@ -86,11 +80,12 @@ public class ListSinkOperator extends Op
return numRows;
}
+ @SuppressWarnings("unchecked")
public void processOp(Object row, int tag) throws HiveException {
try {
- res.add(mSerde.serialize(row, outputObjInspector).toString());
+ res.add(fetcher.convert(row, inputObjInspectors[0]));
numRows++;
- } catch (SerDeException e) {
+ } catch (Exception e) {
throw new HiveException(e);
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Mon Dec
16 01:51:47 2013
@@ -450,6 +450,10 @@ public abstract class Operator<T extends
this.inputObjInspectors = inputObjInspectors;
}
+ public ObjectInspector getOutputObjInspector() {
+ return outputObjInspector;
+ }
+
/**
* Process the row.
*
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Mon Dec 16
01:51:47 2013
@@ -30,7 +30,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.lib.Node;
@@ -89,7 +88,7 @@ public abstract class Task<T extends Ser
public static enum FeedType {
DYNAMIC_PARTITIONS, // list of dynamic partitions
- };
+ }
// Bean methods
@@ -168,12 +167,6 @@ public abstract class Task<T extends Ser
*/
protected abstract int execute(DriverContext driverContext);
- // dummy method - FetchTask overwrites this
- public boolean fetch(ArrayList<String> res) throws IOException, CommandNeedRetryException {
- assert false;
- return false;
- }
-
public boolean isRootTask() {
return rootTask;
}
@@ -251,8 +244,6 @@ public abstract class Task<T extends Ser
childTsk.removeFromChildrenTasks();
}
}
-
- return;
}
Modified:
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Mon
Dec 16 01:51:47 2013
@@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -191,6 +192,28 @@ public final class SerDeUtils {
return (escape.toString());
}
+ /**
+ * Convert a Object to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
+ * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
+ *
+ * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
+ */
+ public static Object toThriftPayload(Object val, ObjectInspector valOI) {
+ if (valOI.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+ Object obj = ObjectInspectorUtils.copyToStandardObject(val, valOI,
+ ObjectInspectorUtils.ObjectInspectorCopyOption.JAVA);
+ if (((PrimitiveObjectInspector)valOI).getPrimitiveCategory() ==
+ PrimitiveObjectInspector.PrimitiveCategory.BINARY) {
+ // todo HIVE-5269
+ return new String((byte[])obj);
+ }
+ return obj;
+ }
+ // for now, expose non-primitive as a string
+ // TODO: expose non-primitive as a structured object while maintaining JDBC compliance
+ return SerDeUtils.getJSONString(val, valOI);
+ }
+
public static String getJSONString(Object o, ObjectInspector oi) {
return getJSONString(o, oi, JSON_NULL);
}
Modified:
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
URL:
http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
---
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
(original)
+++
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
Mon Dec 16 01:51:47 2013
@@ -22,7 +22,6 @@ import java.io.IOException;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -41,11 +40,10 @@ import org.apache.hadoop.hive.ql.process
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.BytesWritable;
@@ -226,75 +224,75 @@ public class SQLOperation extends Execut
return resultSchema;
}
+ private transient final List<Object> convey = new ArrayList<Object>();
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
- assertState(OperationState.FINISHED);
validateDefaultFetchOrientation(orientation);
- ArrayList<String> rows = new ArrayList<String>();
- driver.setMaxRows((int)maxRows);
+ assertState(OperationState.FINISHED);
try {
/* if client is requesting fetch-from-start and its not the first time reading from this operation
- * then reset the fetch position to beginging
+ * then reset the fetch position to beginning
*/
if (orientation.equals(FetchOrientation.FETCH_FIRST) && fetchStarted) {
driver.resetFetch();
}
fetchStarted = true;
- driver.getResults(rows);
-
- getSerDe();
- StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
- List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
- RowSet rowSet = new RowSet();
-
- Object[] deserializedFields = new Object[fieldRefs.size()];
- Object rowObj;
- ObjectInspector fieldOI;
-
- for (String rowString : rows) {
- rowObj = serde.deserialize(new BytesWritable(rowString.getBytes()));
- for (int i = 0; i < fieldRefs.size(); i++) {
- StructField fieldRef = fieldRefs.get(i);
- fieldOI = fieldRef.getFieldObjectInspector();
- deserializedFields[i] = convertLazyToJava(soi.getStructFieldData(rowObj, fieldRef), fieldOI);
- }
- rowSet.addRow(resultSchema, deserializedFields);
+ driver.setMaxRows((int) maxRows);
+ if (driver.getResults(convey)) {
+ return decode(convey);
}
- return rowSet;
+ return new RowSet();
} catch (IOException e) {
throw new HiveSQLException(e);
} catch (CommandNeedRetryException e) {
throw new HiveSQLException(e);
} catch (Exception e) {
throw new HiveSQLException(e);
+ } finally {
+ convey.clear();
}
}
- /**
- * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
- * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
- *
- * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
- */
- private static Object convertLazyToJava(Object o, ObjectInspector oi) {
- Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA);
-
- if (obj == null) {
- return null;
- }
- if(oi.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) {
- return new String((byte[])obj);
+ private RowSet decode(List<Object> rows) throws Exception {
+ if (driver.isFetchingTable()) {
+ return prepareFromRow(rows);
}
- // for now, expose non-primitive as a string
- // TODO: expose non-primitive as a structured object while maintaining JDBC compliance
- if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
- return SerDeUtils.getJSONString(o, oi);
+ return decodeFromString(rows);
+ }
+
+ // already encoded to thrift-able object in ThriftFormatter
+ private RowSet prepareFromRow(List<Object> rows) throws Exception {
+ RowSet rowSet = new RowSet();
+ for (Object row : rows) {
+ rowSet.addRow(resultSchema, (Object[]) row);
}
- return obj;
+ return rowSet;
}
+ private RowSet decodeFromString(List<Object> rows) throws SQLException, SerDeException {
+ getSerDe();
+ StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
+ List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
+ RowSet rowSet = new RowSet();
+
+ Object[] deserializedFields = new Object[fieldRefs.size()];
+ Object rowObj;
+ ObjectInspector fieldOI;
+
+ for (Object rowString : rows) {
+ rowObj = serde.deserialize(new BytesWritable(((String)rowString).getBytes()));
+ for (int i = 0; i < fieldRefs.size(); i++) {
+ StructField fieldRef = fieldRefs.get(i);
+ fieldOI = fieldRef.getFieldObjectInspector();
+ Object fieldData = soi.getStructFieldData(rowObj, fieldRef);
+ deserializedFields[i] = SerDeUtils.toThriftPayload(fieldData, fieldOI);
+ }
+ rowSet.addRow(resultSchema, deserializedFields);
+ }
+ return rowSet;
+ }
private SerDe getSerDe() throws SQLException {
if (serde != null) {
@@ -302,8 +300,6 @@ public class SQLOperation extends Execut
}
try {
List<FieldSchema> fieldSchemas = mResultSchema.getFieldSchemas();
- List<String> columnNames = new ArrayList<String>();
- List<String> columnTypes = new ArrayList<String>();
StringBuilder namesSb = new StringBuilder();
StringBuilder typesSb = new StringBuilder();
@@ -313,8 +309,6 @@ public class SQLOperation extends Execut
namesSb.append(",");
typesSb.append(",");
}
- columnNames.add(fieldSchemas.get(pos).getName());
- columnTypes.add(fieldSchemas.get(pos).getType());
namesSb.append(fieldSchemas.get(pos).getName());
typesSb.append(fieldSchemas.get(pos).getType());
}
Modified:
hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL:
http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1551103&r1=1551102&r2=1551103&view=diff
==============================================================================
---
hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
(original)
+++
hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
Mon Dec 16 01:51:47 2013
@@ -32,6 +32,8 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.exec.FetchFormatter;
+import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
import org.apache.hadoop.hive.ql.history.HiveHistory;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.common.util.HiveVersionInfo;
@@ -88,6 +90,9 @@ public class HiveSessionImpl implements
// set an explicit session name to control the download directory name
hiveConf.set(ConfVars.HIVESESSIONID.varname,
sessionHandle.getHandleIdentifier().toString());
+ // use thrift transportable formatter
+ hiveConf.set(ListSinkOperator.OUTPUT_FORMATTER,
+ FetchFormatter.ThriftFormatter.class.getName());
sessionState = new SessionState(hiveConf);
SessionState.start(sessionState);
}