kgyrtkirk commented on a change in pull request #970: Hive 23100
URL: https://github.com/apache/hive/pull/970#discussion_r408753089
 
 

 ##########
 File path: 
ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java
 ##########
 @@ -0,0 +1,1021 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.type;
+
+import com.google.common.collect.ImmutableList;
+import java.math.BigDecimal;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import org.apache.calcite.avatica.util.TimeUnit;
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexInputRef;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexSubQuery;
+import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.sql.SqlCollation;
+import org.apache.calcite.sql.SqlIntervalQualifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.fun.SqlQuantifyOperator;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.util.ConversionUtil;
+import org.apache.calcite.util.DateString;
+import org.apache.calcite.util.NlsString;
+import org.apache.calcite.util.TimestampString;
+import org.apache.commons.lang3.math.NumberUtils;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import 
org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
+import 
org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil;
+import 
org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRexExprList;
+import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import 
org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory.HiveNlsString.Interpretation;
+import org.apache.hadoop.hive.ql.plan.SubqueryType;
+import org.apache.hadoop.hive.ql.udf.SettableUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Expression factory for Calcite {@link RexNode}.
+ */
+public class RexNodeExprFactory extends ExprFactory<RexNode> {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(RexNodeExprFactory.class);
+
+  private final RexBuilder rexBuilder;
+  private final FunctionHelper functionHelper;
+
  /**
   * Creates a factory that builds Calcite {@link RexNode} expressions with the
   * given builder.
   *
   * @param rexBuilder Calcite builder used to construct all literals and calls
   */
  public RexNodeExprFactory(RexBuilder rexBuilder) {
    this.rexBuilder = rexBuilder;
    this.functionHelper = new HiveFunctionHelper(rexBuilder);
  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected boolean isExprInstance(Object o) {
+    return o instanceof RexNode;
+  }
+
  /**
   * {@inheritDoc}
   *
   * <p>Constant-folded columns (a {@link ConstantObjectInspector} over a primitive)
   * are turned into literals; all other columns become input references whose index
   * is the column's position in {@code rowResolver} shifted by {@code offset}.
   */
  @Override
  protected RexNode toExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset)
      throws CalciteSemanticException {
    ObjectInspector inspector = colInfo.getObjectInspector();
    if (inspector instanceof ConstantObjectInspector && inspector instanceof PrimitiveObjectInspector) {
      // The column carries a constant value: emit a literal instead of an input ref.
      return toPrimitiveConstDesc(colInfo, inspector, rexBuilder);
    }
    int index = rowResolver.getPosition(colInfo.getInternalName());
    if (index < 0) {
      throw new CalciteSemanticException("Unexpected error: Cannot find column");
    }
    // offset accounts for columns of inputs that precede this one in the joined row.
    return rexBuilder.makeInputRef(
        TypeConverter.convert(colInfo.getType(), rexBuilder.getTypeFactory()), index + offset);
  }
+
  /**
   * Converts a constant-valued primitive column into a Calcite literal, taking the
   * value from the column's {@link ConstantObjectInspector}.
   */
  private static RexNode toPrimitiveConstDesc(
      ColumnInfo colInfo, ObjectInspector inspector, RexBuilder rexBuilder)
      throws CalciteSemanticException {
    Object constant = ((ConstantObjectInspector) inspector).getWritableConstantValue();
    // NOTE(review): last arg is RexBuilder.makeLiteral's allowCast flag — kept false
    // as in the other literal-building call sites in this class.
    return rexBuilder.makeLiteral(constant,
        TypeConverter.convert(colInfo.getType(), rexBuilder.getTypeFactory()),
        false);
  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createColumnRefExpr(ColumnInfo colInfo, RowResolver 
rowResolver, int offset)
+      throws CalciteSemanticException {
+    int index = rowResolver.getPosition(colInfo.getInternalName());
+    return rexBuilder.makeInputRef(
+        TypeConverter.convert(colInfo.getType(), rexBuilder.getTypeFactory()), 
index + offset);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createColumnRefExpr(ColumnInfo colInfo, List<RowResolver> 
rowResolverList)
+      throws SemanticException {
+    int index = getPosition(colInfo, rowResolverList);
+    return rexBuilder.makeInputRef(
+        TypeConverter.convert(colInfo.getType(), rexBuilder.getTypeFactory()), 
index);
+  }
+
  /**
   * Computes the global position of {@code colInfo} across the given row resolvers,
   * as if their column lists were concatenated in order.
   *
   * <p>Resolvers scanned before the match contribute their full column count to the
   * offset; the matching resolver contributes the column's position within it. After
   * a match, the remaining resolvers are only consulted to detect ambiguity.
   *
   * @throws CalciteSemanticException if the column resolves in more than one resolver
   */
  private int getPosition(ColumnInfo colInfo, List<RowResolver> rowResolverList)
      throws SemanticException {
    ColumnInfo tmp;
    ColumnInfo cInfoToRet = null;
    int position = 0;
    for (RowResolver rr : rowResolverList) {
      tmp = rr.get(colInfo.getTabAlias(), colInfo.getAlias());
      if (tmp != null) {
        if (cInfoToRet != null) {
          // Matched in two different resolvers: the reference is ambiguous.
          throw new CalciteSemanticException("Could not resolve column name");
        }
        cInfoToRet = tmp;
        position += rr.getPosition(cInfoToRet.getInternalName());
      } else if (cInfoToRet == null) {
        // Not found yet: skip past all columns of this resolver.
        position += rr.getColumnInfos().size();
      }
    }
    return position;
  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createNullConstantExpr() {
+    return rexBuilder.makeNullLiteral(
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.NULL));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createBooleanConstantExpr(String value) {
+    Boolean b = value != null ? Boolean.valueOf(value) : null;
+    return rexBuilder.makeLiteral(b,
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BOOLEAN),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createBigintConstantExpr(String value) {
+    return rexBuilder.makeLiteral(
+        new BigDecimal(Long.valueOf(value)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BIGINT),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createIntConstantExpr(String value) {
+    return rexBuilder.makeLiteral(
+        new BigDecimal(Integer.valueOf(value)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.INTEGER),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createSmallintConstantExpr(String value) {
+    return rexBuilder.makeLiteral(
+        new BigDecimal(Short.valueOf(value)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.SMALLINT),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createTinyintConstantExpr(String value) {
+    return rexBuilder.makeLiteral(
+        new BigDecimal(Byte.valueOf(value)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TINYINT),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createFloatConstantExpr(String value) {
+    Float f = Float.valueOf(value);
+    return rexBuilder.makeApproxLiteral(
+        new BigDecimal(Float.toString(f)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.FLOAT));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexNode createDoubleConstantExpr(String value) throws 
SemanticException {
+    Double d = Double.valueOf(value);
+    // TODO: The best solution is to support NaN in expression reduction.
+    if (Double.isNaN(d)) {
+      throw new CalciteSemanticException("NaN", 
UnsupportedFeature.Invalid_decimal);
+    }
+    return rexBuilder.makeApproxLiteral(
+        new BigDecimal(Double.toString(d)),
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.DOUBLE));
+  }
+
  /**
   * {@inheritDoc}
   *
   * <p>Returns {@code null} (no expression) when the string cannot be parsed as a
   * decimal and the caller did not allow a null-valued constant.
   */
  @Override
  protected RexNode createDecimalConstantExpr(String value, boolean allowNullValueConstantExpr) {
    // HiveDecimal.create may return null (e.g. for unparseable input) — handled below.
    HiveDecimal hd = HiveDecimal.create(value);
    if (!allowNullValueConstantExpr && hd == null) {
      return null;
    }
    // Precision/scale are derived from the parsed value (DECIMAL(1,0) for null).
    DecimalTypeInfo type = adjustType(hd);
    return rexBuilder.makeExactLiteral(
        hd != null ? hd.bigDecimalValue() : null,
        TypeConverter.convert(type, rexBuilder.getTypeFactory()));
  }
+
+  @Override
+  protected TypeInfo adjustConstantType(PrimitiveTypeInfo targetType, Object 
constantValue) {
+    if (constantValue instanceof HiveDecimal) {
+      return adjustType((HiveDecimal) constantValue);
+    }
+    return targetType;
+  }
+
+  private DecimalTypeInfo adjustType(HiveDecimal hd) {
+    // Note: the normalize() call with rounding in HiveDecimal will currently 
reduce the
+    //       precision and scale of the value by throwing away trailing 
zeroes. This may or may
+    //       not be desirable for the literals; however, this used to be the 
default behavior
+    //       for explicit decimal literals (e.g. 1.0BD), so we keep this 
behavior for now.
+    int prec = 1;
+    int scale = 0;
+    if (hd != null) {
+      prec = hd.precision();
+      scale = hd.scale();
+    }
+    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+    return typeInfo;
+  }
+
  /**
   * {@inheritDoc}
   *
   * <p>Attempts to reinterpret {@code constantValue} as a value of {@code targetType},
   * e.g. when a literal is compared against a column of a different type. Returns
   * {@code null} when the constant cannot be represented exactly in the target type
   * (out-of-range narrowing, unparseable number, or char/varchar truncation).
   */
  @Override
  protected Object interpretConstantAsPrimitive(PrimitiveTypeInfo targetType, Object constantValue,
      PrimitiveTypeInfo sourceType) {
    // Extract string value if necessary
    Object constantToInterpret = constantValue;
    if (constantValue instanceof HiveNlsString) {
      constantToInterpret = ((HiveNlsString) constantValue).getValue();
    }

    // Numeric (or numeric-looking string) constants: narrow exactly to the target type.
    if (constantToInterpret instanceof Number || constantToInterpret instanceof String) {
      try {
        PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
        if (PrimitiveObjectInspectorUtils.intTypeEntry.equals(primitiveTypeEntry)) {
          return toBigDecimal(constantToInterpret.toString()).intValueExact();
        } else if (PrimitiveObjectInspectorUtils.longTypeEntry.equals(primitiveTypeEntry)) {
          return toBigDecimal(constantToInterpret.toString()).longValueExact();
        } else if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
          return Double.valueOf(constantToInterpret.toString());
        } else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
          return Float.valueOf(constantToInterpret.toString());
        } else if (PrimitiveObjectInspectorUtils.byteTypeEntry.equals(primitiveTypeEntry)) {
          return toBigDecimal(constantToInterpret.toString()).byteValueExact();
        } else if (PrimitiveObjectInspectorUtils.shortTypeEntry.equals(primitiveTypeEntry)) {
          return toBigDecimal(constantToInterpret.toString()).shortValueExact();
        } else if (PrimitiveObjectInspectorUtils.decimalTypeEntry.equals(primitiveTypeEntry)) {
          HiveDecimal decimal = HiveDecimal.create(constantToInterpret.toString());
          return decimal != null ? decimal.bigDecimalValue() : null;
        }
      } catch (NumberFormatException | ArithmeticException nfe) {
        // Value does not fit in (or does not parse as) the target type: give up on narrowing.
        LOG.trace("Failed to narrow type of constant", nfe);
        return null;
      }
    }

    // Comparison of decimal and float/double happens in float/double.
    if (constantToInterpret instanceof BigDecimal) {
      BigDecimal bigDecimal = (BigDecimal) constantToInterpret;

      PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
      if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
        return bigDecimal.doubleValue();
      } else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
        return bigDecimal.floatValue();
      }
      return bigDecimal;
    }

    String constTypeInfoName = sourceType.getTypeName();
    if (constTypeInfoName.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
      // because a comparison against a "string" will happen in "string" type.
      // to avoid unintentional comparisons in "string"
      // constants which are representing char/varchar values must be converted to the
      // appropriate type.
      if (targetType instanceof CharTypeInfo) {
        final String constValue = constantToInterpret.toString();
        final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
        HiveChar newValue = new HiveChar(constValue, length);
        HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH);
        // If shortening to the target length changes the value, the constant cannot be
        // represented in the target type: return null rather than a silently altered value.
        if (maxCharConst.equals(newValue)) {
          return makeHiveUnicodeString(Interpretation.CHAR, newValue.getValue());
        } else {
          return null;
        }
      }
      if (targetType instanceof VarcharTypeInfo) {
        final String constValue = constantToInterpret.toString();
        final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
        HiveVarchar newValue = new HiveVarchar(constValue, length);
        HiveVarchar maxCharConst = new HiveVarchar(constValue, HiveVarchar.MAX_VARCHAR_LENGTH);
        if (maxCharConst.equals(newValue)) {
          return makeHiveUnicodeString(Interpretation.VARCHAR, newValue.getValue());
        } else {
          return null;
        }
      }
    }

    // Fall through: return the original constant (including any NlsString wrapper) unchanged.
    return constantValue;
  }
+
+  private BigDecimal toBigDecimal(String val) {
+    if (!NumberUtils.isNumber(val)) {
+      throw new NumberFormatException("The given string is not a valid number: 
" + val);
+    }
+    return new BigDecimal(val.replaceAll("[dDfFlL]$", ""));
+  }
+
  /**
   * {@inheritDoc}
   */
  @Override
  protected RexLiteral createStringConstantExpr(String value) {
    // Wrap the value in a Hive NlsString tagged with the STRING interpretation.
    return rexBuilder.makeCharLiteral(
        makeHiveUnicodeString(Interpretation.STRING, value));
  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createDateConstantExpr(String value) {
+    Date d = Date.valueOf(value);
+    return rexBuilder.makeDateLiteral(
+        DateString.fromDaysSinceEpoch(d.toEpochDay()));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createTimestampConstantExpr(String value) {
+    Timestamp t = Timestamp.valueOf(value);
+    return (RexLiteral) rexBuilder.makeLiteral(
+        
TimestampString.fromMillisSinceEpoch(t.toEpochMilli()).withNanos(t.getNanos()),
+        rexBuilder.getTypeFactory().createSqlType(
+            SqlTypeName.TIMESTAMP,
+            
rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)),
+        false);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createTimestampLocalTimeZoneConstantExpr(String value, 
ZoneId zoneId) {
+    TimestampTZ t = TimestampTZUtil.parse(value);
+
+    final TimestampString tsLocalTZString;
+    if (value == null) {
+      tsLocalTZString = null;
+    } else {
+      Instant i = t.getZonedDateTime().toInstant();
+      tsLocalTZString = TimestampString
+          .fromMillisSinceEpoch(i.toEpochMilli())
+          .withNanos(i.getNano());
+    }
+    return rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
+        tsLocalTZString,
+        
rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createIntervalYearMonthConstantExpr(String value) {
+    BigDecimal totalMonths = 
BigDecimal.valueOf(HiveIntervalYearMonth.valueOf(value).getTotalMonths());
+    return rexBuilder.makeIntervalLiteral(totalMonths,
+        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new 
SqlParserPos(1, 1)));
+  }
+
  /**
   * {@inheritDoc}
   *
   * <p>The interval is encoded as a single MILLISECOND-unit value:
   * whole seconds scaled to millis, plus the nanosecond part as a
   * scale-6 fraction (nanos / 10^6 = fractional milliseconds).
   */
  @Override
  protected RexLiteral createIntervalDayTimeConstantExpr(String value) {
    HiveIntervalDayTime v = HiveIntervalDayTime.valueOf(value);
    // Whole seconds expressed in milliseconds.
    BigDecimal secsValueBd = BigDecimal
        .valueOf(v.getTotalSeconds() * 1000);
    // Nanoseconds as fractional milliseconds (unscaled nanos with scale 6).
    BigDecimal nanosValueBd = BigDecimal.valueOf((v).getNanos(), 6);
    return rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
        new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new
            SqlParserPos(1, 1)));
  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createIntervalYearConstantExpr(String value) {
+    HiveIntervalYearMonth v = new 
HiveIntervalYearMonth(Integer.parseInt(value), 0);
+    BigDecimal totalMonths = BigDecimal.valueOf(v.getTotalMonths());
+    return rexBuilder.makeIntervalLiteral(totalMonths,
+        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new 
SqlParserPos(1, 1)));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected RexLiteral createIntervalMonthConstantExpr(String value) {
 
 Review comment:
   All these methods are present on the interface — I'm wondering if there is
some other way to structure this. Moving the literal-related logic into a
separate component might be beneficial, but the factory is currently templated,
so that would mean passing an additional collaborator along with it to convert
the constants. I don't have a good idea right now, but I'll keep thinking.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to