xuefuz commented on a change in pull request #8813: [FLINK-12891][hive] remove hadoop/hive writable from boundaries of Hive functions and Flink
URL: https://github.com/apache/flink/pull/8813#discussion_r296411116
##########
File path: flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
##########
@@ -181,86 +168,22 @@ private static ConstantObjectInspector getPrimitiveJavaConstantObjectInspector(P
*/
 	public static HiveObjectConversion getConversion(ObjectInspector inspector, DataType dataType) {
 		if (inspector instanceof PrimitiveObjectInspector) {
-			if (inspector instanceof JavaBooleanObjectInspector) {
-				if (((JavaBooleanObjectInspector) inspector).preferWritable()) {
-					return o -> new BooleanWritable((Boolean) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaStringObjectInspector) {
-				if (((StringObjectInspector) inspector).preferWritable()) {
-					return o -> new Text((String) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaByteObjectInspector) {
-				if (((JavaByteObjectInspector) inspector).preferWritable()) {
-					return o -> new ByteWritable((Byte) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaShortObjectInspector) {
-				if (((JavaShortObjectInspector) inspector).preferWritable()) {
-					return o -> new ShortWritable((Short) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaIntObjectInspector) {
-				if (((JavaIntObjectInspector) inspector).preferWritable()) {
-					return o -> new IntWritable((Integer) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaLongObjectInspector) {
-				if (((JavaLongObjectInspector) inspector).preferWritable()) {
-					return o -> new LongWritable((Long) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaFloatObjectInspector) {
-				if (((JavaFloatObjectInspector) inspector).preferWritable()) {
-					return o -> new FloatWritable((Float) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaDoubleObjectInspector) {
-				if (((JavaDoubleObjectInspector) inspector).preferWritable()) {
-					return o -> new DoubleWritable((Double) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaDateObjectInspector) {
-				if (((JavaDateObjectInspector) inspector).preferWritable()) {
-					return o -> new DateWritable((Date) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaTimestampObjectInspector) {
-				if (((JavaTimestampObjectInspector) inspector).preferWritable()) {
-					return o -> new TimestampWritable((Timestamp) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaBinaryObjectInspector) {
-				if (((JavaBinaryObjectInspector) inspector).preferWritable()) {
-					return o -> new BytesWritable((byte[]) o);
-				} else {
-					return IdentityConversion.INSTANCE;
-				}
-			} else if (inspector instanceof JavaHiveCharObjectInspector) {
-				if (((JavaHiveCharObjectInspector) inspector).preferWritable()) {
-					return o -> new HiveCharWritable(
-						new HiveChar((String) o, ((CharType) dataType.getLogicalType()).getLength()));
-				} else {
-					return o -> new HiveChar((String) o, ((CharType) dataType.getLogicalType()).getLength());
-				}
-			} else if (inspector instanceof JavaHiveVarcharObjectInspector) {
-				if (((JavaHiveVarcharObjectInspector) inspector).preferWritable()) {
-					return o -> new HiveVarcharWritable(
-						new HiveVarchar((String) o, ((VarCharType) dataType.getLogicalType()).getLength()));
-				} else {
-					return o -> new HiveVarchar((String) o, ((VarCharType) dataType.getLogicalType()).getLength());
-				}
+			if (inspector instanceof BooleanObjectInspector ||
+					inspector instanceof StringObjectInspector ||
+					inspector instanceof ByteObjectInspector ||
+					inspector instanceof ShortObjectInspector ||
+					inspector instanceof IntObjectInspector ||
+					inspector instanceof LongObjectInspector ||
+					inspector instanceof FloatObjectInspector ||
+					inspector instanceof DoubleObjectInspector ||
+					inspector instanceof DateObjectInspector ||
+					inspector instanceof TimestampObjectInspector ||
+					inspector instanceof BinaryObjectInspector) {
+				return IdentityConversion.INSTANCE;
+			} else if (inspector instanceof HiveCharObjectInspector) {
+				return o -> new HiveChar((String) o, ((CharType) dataType.getLogicalType()).getLength());
Review comment:
I don't think this is about a single source of truth. It's a data conversion: we are converting data from Flink to Hive. While the data type is char on both ends, the two sides may come with different precisions. For instance, the data could be char(10) while Hive's data type is char(5). During the conversion, the data should be enforced with precision 5, not 10.
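
To make the precision point concrete, here is a minimal, hypothetical sketch (not part of this PR; the class name and hard-coded length are illustrative only). It assumes Hive's HiveChar(String, int) constructor enforces the given max length by truncating longer values, which is why the conversion takes the length from the CharType of the Flink DataType:

import org.apache.hadoop.hive.common.type.HiveChar;

// Hypothetical standalone demo of the char precision enforcement,
// separate from HiveInspectors.getConversion().
public class CharPrecisionDemo {

    public static void main(String[] args) {
        // Value produced on the Flink side for a char(10) column.
        String flinkValue = "abcdefghij";

        // The conversion returned for a HiveCharObjectInspector does essentially
        // this, using ((CharType) dataType.getLogicalType()).getLength();
        // here the target precision 5 is hard-coded for illustration.
        HiveChar converted = new HiveChar(flinkValue, 5);

        // HiveChar enforces the max length, so the ten-character input is
        // truncated to precision 5 ("abcde") during the conversion.
        System.out.println(converted.getValue());
    }
}

In other words, the length argument is what carries the target precision into the conversion, independent of whatever precision the incoming value happened to have.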