dawidwys commented on code in PR #23365:
URL: https://github.com/apache/flink/pull/23365#discussion_r1316935241
##########
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/lookups/FieldReferenceLookup.java:
##########
@@ -75,40 +95,89 @@ public Optional<FieldReferenceExpression> lookupField(String name)
* @return concatenated list of fields of all inputs.
*/
public List<FieldReferenceExpression> getAllInputFields() {
- return fieldReferences.stream().flatMap(input ->
input.values().stream()).collect(toList());
+ return getInputFields(Collections.emptyList());
}
- private static List<Map<String, FieldReferenceExpression>>
prepareFieldReferences(
+ /**
+ * Gives matching fields of underlying inputs in order of those inputs and
order of fields
+ * within input.
+ *
+ * @return concatenated list of matching fields of all inputs.
+ */
+ public List<FieldReferenceExpression> getInputFields(
+ List<ColumnExpansionStrategy> expansionStrategies) {
+ return fieldReferences.stream()
+ .flatMap(input -> input.values().stream())
+ .filter(fieldRef -> includeExpandedColumn(fieldRef.column,
expansionStrategies))
+ .map(FieldReference::toExpr)
+ .collect(toList());
+ }
+
+ private static List<Map<String, FieldReference>> prepareFieldReferences(
List<QueryOperation> queryOperations) {
return IntStream.range(0, queryOperations.size())
.mapToObj(idx ->
prepareFieldsInInput(queryOperations.get(idx), idx))
.collect(Collectors.toList());
}
- private static Map<String, FieldReferenceExpression> prepareFieldsInInput(
+ private static Map<String, FieldReference> prepareFieldsInInput(
QueryOperation input, int inputIdx) {
ResolvedSchema resolvedSchema = input.getResolvedSchema();
return IntStream.range(0, resolvedSchema.getColumnCount())
- .mapToObj(
- i ->
- new FieldReferenceExpression(
- resolvedSchema.getColumnNames().get(i),
-
resolvedSchema.getColumnDataTypes().get(i),
- inputIdx,
- i))
+ .mapToObj(i -> new
FieldReference(resolvedSchema.getColumns().get(i), inputIdx, i))
.collect(
Collectors.toMap(
- FieldReferenceExpression::getName,
+ fieldRef -> fieldRef.column.getName(),
Function.identity(),
(fieldRef1, fieldRef2) -> {
- throw
failAmbiuguousColumn(fieldRef1.getName());
+ throw
failAmbiguousColumn(fieldRef1.column.getName());
},
// we need to maintain order of fields within
input for resolving
// e.g. '*' reference
LinkedHashMap::new));
}
- private static ValidationException failAmbiuguousColumn(String name) {
+ private static ValidationException failAmbiguousColumn(String name) {
return new ValidationException("Ambiguous column name: " + name);
}
+
+ //
--------------------------------------------------------------------------------------------
+ // Shared code with SQL validator
+ //
--------------------------------------------------------------------------------------------
+
+ public static boolean includeExpandedColumn(
+ Column column, List<ColumnExpansionStrategy> strategies) {
+ for (ColumnExpansionStrategy strategy : strategies) {
+ switch (strategy) {
+ case EXCLUDE_ALIASED_VIRTUAL_METADATA_COLUMNS:
+ {
+ if (!(column instanceof Column.MetadataColumn)) {
+ continue;
+ }
+ final Column.MetadataColumn metadataColumn =
(Column.MetadataColumn) column;
+ if (metadataColumn.isVirtual()
+ &&
metadataColumn.getMetadataKey().isPresent()) {
+ return false;
+ }
+ }
+ break;
Review Comment:
Could we already extract this to a separate method?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]