This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-table-store.git
The following commit(s) were added to refs/heads/master by this push:
new 7f83e286 [hotfix] Fix compile errors in spark module
7f83e286 is described below
commit 7f83e286b9ae6a9a48b503b3d94cac792bd0a7fb
Author: JingsongLi <[email protected]>
AuthorDate: Fri Jun 17 16:39:18 2022 +0800
[hotfix] Fix compile errors in spark module
---
.../java/org/apache/flink/table/store/spark/SparkReaderFactory.java | 2 +-
.../src/main/java/org/apache/flink/table/store/spark/SparkScan.java | 2 +-
.../java/org/apache/flink/table/store/spark/SparkScanBuilder.java | 4 ++--
.../src/main/java/org/apache/flink/table/store/spark/SparkTable.java | 2 +-
4 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkReaderFactory.java b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkReaderFactory.java
index 17ae24cb..2e792aff 100644
--- a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkReaderFactory.java
+++ b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkReaderFactory.java
@@ -47,7 +47,7 @@ public class SparkReaderFactory implements PartitionReaderFactory {
}
private RowType readRowType() {
- return TypeUtils.project(table.rowType(), projectedFields);
+ return TypeUtils.project(table.schema().logicalRowType(), projectedFields);
}
@Override
diff --git a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScan.java b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScan.java
index 2167f970..d8ee8d5f 100644
--- a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScan.java
+++ b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScan.java
@@ -62,7 +62,7 @@ public class SparkScan implements Scan, SupportsReportStatistics {
@Override
public StructType readSchema() {
- return SparkTypeUtils.fromFlinkRowType(TypeUtils.project(table.rowType(), projectedFields));
+ return SparkTypeUtils.fromFlinkRowType(TypeUtils.project(table.schema().logicalRowType(), projectedFields));
}
@Override
diff --git a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScanBuilder.java b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScanBuilder.java
index f26566cc..be639c1d 100644
--- a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScanBuilder.java
+++ b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkScanBuilder.java
@@ -47,7 +47,7 @@ public class SparkScanBuilder
@Override
public Filter[] pushFilters(Filter[] filters) {
- SparkFilterConverter converter = new SparkFilterConverter(table.rowType());
+ SparkFilterConverter converter = new SparkFilterConverter(table.schema().logicalRowType());
List<Predicate> predicates = new ArrayList<>();
List<Filter> pushed = new ArrayList<>();
for (Filter filter : filters) {
@@ -70,7 +70,7 @@ public class SparkScanBuilder
@Override
public void pruneColumns(StructType requiredSchema) {
String[] pruneFields = requiredSchema.fieldNames();
- List<String> fieldNames = table.rowType().getFieldNames();
+ List<String> fieldNames = table.schema().fieldNames();
int[] projected = new int[pruneFields.length];
for (int i = 0; i < projected.length; i++) {
projected[i] = fieldNames.indexOf(pruneFields[i]);
diff --git a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkTable.java b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkTable.java
index ed4a1997..0d1648ae 100644
--- a/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkTable.java
+++ b/flink-table-store-spark/src/main/java/org/apache/flink/table/store/spark/SparkTable.java
@@ -52,7 +52,7 @@ public class SparkTable implements org.apache.spark.sql.connector.catalog.Table,
@Override
public StructType schema() {
- return SparkTypeUtils.fromFlinkRowType(table.rowType());
+ return SparkTypeUtils.fromFlinkRowType(table.schema().logicalRowType());
}
@Override