Github user fhueske commented on a diff in the pull request:

    https://github.com/apache/flink/pull/3277#discussion_r100771883
  
    --- Diff: 
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
 ---
    @@ -428,6 +431,123 @@ abstract class TableEnvironment(val config: 
TableConfig) {
         (fieldNames.toArray, fieldIndexes.toArray)
       }
     
    +  /**
    +    * Creates a final converter that maps the internal row type to 
external type.
    +    *
    +    * @param physicalRowTypeInfo the input of the sink
    +    * @param logicalRowType the logical type with correct field names 
(esp. for POJO field mapping)
    +    * @param expectedTypeInfo the output type of the sink
    +    * @param functionName name of the map function. Does not need to be 
unique but has to be a
    +    *                     valid Java class identifier.
    +    */
    +  protected def sinkConversion[T](
    +      physicalRowTypeInfo: TypeInformation[Row],
    +      logicalRowType: RelDataType,
    +      expectedTypeInfo: TypeInformation[T],
    +      functionName: String)
    +    : Option[MapFunction[Row, T]] = {
    +
    +    // validate that at least the field types of physical and logical type 
match
    +    // we do that here to make sure that plan translation was correct
    +    val logicalRowTypeInfo = 
FlinkTypeFactory.toInternalRowTypeInfo(logicalRowType)
    +    if (physicalRowTypeInfo != logicalRowTypeInfo) {
    +      throw TableException("The field types of physical and logical row 
types do not match. " +
    +        "This is a bug and should not happen. Please file an issue.")
    +    }
    +
    +    // expected type is a row, no conversion needed
    +    // TODO this logic will change with FLINK-5429
    +    if (expectedTypeInfo.getTypeClass == classOf[Row]) {
    +      return None
    +    }
    +
    +    // convert to type information
    +    val logicalFieldTypes = logicalRowType.getFieldList.asScala map { 
relDataType =>
    +      FlinkTypeFactory.toTypeInfo(relDataType.getType)
    +    }
    +    // field names
    +    val logicalFieldNames = logicalRowType.getFieldNames.asScala
    +
    +    // validate expected type
    +    if (expectedTypeInfo.getArity != logicalFieldTypes.length) {
    +      throw new TableException("Arity of result does not match expected 
type.")
    +    }
    +    expectedTypeInfo match {
    +
    +      // POJO type expected
    +      case pt: PojoTypeInfo[_] =>
    +        logicalFieldNames.zip(logicalFieldTypes) foreach {
    +          case (fName, fType) =>
    +            val pojoIdx = pt.getFieldIndex(fName)
    +            if (pojoIdx < 0) {
    +              throw new TableException(s"POJO does not define field name: 
$fName")
    +            }
    +            val expectedTypeInfo = pt.getTypeAt(pojoIdx)
    +            if (fType != expectedTypeInfo) {
    +              throw new TableException(s"Result field does not match 
expected type. " +
    +                s"Expected: $expectedTypeInfo; Actual: $fType")
    +            }
    +        }
    +
    +      // Tuple/Case class type expected
    +      case ct: TupleTypeInfoBase[_] =>
    +        logicalFieldTypes.zipWithIndex foreach {
    +          case (fieldTypeInfo, i) =>
    +            val expectedTypeInfo = ct.getTypeAt(i)
    +            if (fieldTypeInfo != expectedTypeInfo) {
    +              throw new TableException(s"Result field does not match 
expected type. " +
    +                s"Expected: $expectedTypeInfo; Actual: $fieldTypeInfo")
    +            }
    +        }
    +
    +      // Atomic type expected
    +      case at: AtomicType[_] =>
    +        if (logicalFieldTypes.size != 1) {
    +          throw new TableException(s"Result does not have a single field, 
" +
    --- End diff --
    
    Improve error message: "Requested result type is an atomic type but result 
has more than a single field."


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

Reply via email to