[ https://issues.apache.org/jira/browse/FLINK-11982?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

pingle wang updated FLINK-11982:
--------------------------------
    Description: 
Scala code:
{code:scala}
val connector = FileSystem().path("data/in/test.json")
val desc = tEnv.connect(connector)
  .withFormat(
    new Json()
      .schema(Types.ROW(
        Array[String]("id", "name", "age"),
        Array[TypeInformation[_]](Types.STRING, Types.STRING, Types.INT)))
      .failOnMissingField(true))
  .registerTableSource("person")

val sql = "select * from person"
val result = tEnv.sqlQuery(sql)
{code}
Exception info:
{code:java}
Exception in thread "main" org.apache.flink.table.api.NoMatchingTableFactoryException: Could not find a suitable table factory for 'org.apache.flink.table.factories.BatchTableSourceFactory' in the classpath.

Reason: No context matches.

The following properties are requested:
connector.path=file:///Users/batch/test.json
connector.property-version=1
connector.type=filesystem
format.derive-schema=true
format.fail-on-missing-field=true
format.property-version=1
format.type=json

The following factories have been considered:
org.apache.flink.table.sources.CsvBatchTableSourceFactory
org.apache.flink.table.sources.CsvAppendTableSourceFactory
org.apache.flink.table.sinks.CsvBatchTableSinkFactory
org.apache.flink.table.sinks.CsvAppendTableSinkFactory
org.apache.flink.formats.avro.AvroRowFormatFactory
org.apache.flink.formats.json.JsonRowFormatFactory
org.apache.flink.streaming.connectors.kafka.Kafka010TableSourceSinkFactory
org.apache.flink.streaming.connectors.kafka.Kafka09TableSourceSinkFactory

    at org.apache.flink.table.factories.TableFactoryService$.filterByContext(TableFactoryService.scala:214)
    at org.apache.flink.table.factories.TableFactoryService$.findInternal(TableFactoryService.scala:130)
    at org.apache.flink.table.factories.TableFactoryService$.find(TableFactoryService.scala:81)
    at org.apache.flink.table.factories.TableFactoryUtil$.findAndCreateTableSource(TableFactoryUtil.scala:44)
    at org.apache.flink.table.descriptors.ConnectTableDescriptor.registerTableSource(ConnectTableDescriptor.scala:46)
    at com.meitu.mlink.sql.batch.JsonExample.main(JsonExample.java:36){code}
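For context on "Reason: No context matches": TableFactoryService only selects a factory whose required context matches the requested properties, i.e. connector.type=filesystem together with format.type=json for a batch source, and none of the factories listed above declares that combination (the filesystem source factories in the list only cover CSV). A rough skeleton of the kind of factory this issue asks for might look as follows; the class name is made up, the property keys are the ones from the log above, and the actual source construction is left out:

{code:scala}
import java.util

import org.apache.flink.table.factories.BatchTableSourceFactory
import org.apache.flink.table.sources.BatchTableSource
import org.apache.flink.types.Row

// Hypothetical skeleton only. A real implementation would also need a
// JSON-capable BatchTableSource and a service entry in
// META-INF/services/org.apache.flink.table.factories.TableFactory.
class JsonFileBatchTableSourceFactory extends BatchTableSourceFactory[Row] {

  // The context a factory must declare so that the properties requested
  // above (connector.type=filesystem, format.type=json) find a match.
  override def requiredContext(): util.Map[String, String] = {
    val context = new util.HashMap[String, String]()
    context.put("connector.type", "filesystem")
    context.put("format.type", "json")
    context.put("connector.property-version", "1")
    context.put("format.property-version", "1")
    context
  }

  // Keys accepted beyond the required context (abbreviated; a real factory
  // would also list the schema-related keys).
  override def supportedProperties(): util.List[String] = {
    val properties = new util.ArrayList[String]()
    properties.add("connector.path")
    properties.add("format.fail-on-missing-field")
    properties.add("format.derive-schema")
    properties
  }

  // Building the source from the properties is the substance of this issue
  // and is intentionally not sketched here.
  override def createBatchTableSource(properties: util.Map[String, String]): BatchTableSource[Row] = ???
}
{code}

Until such a factory exists, a possible interim workaround (a minimal sketch, assuming the input file is line-delimited JSON, a plain Jackson dependency on the classpath, and the made-up Person case class and JsonWorkaround object below) is to parse the file with the DataSet API and register the result as a table:

{code:scala}
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.flink.api.scala._
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.table.api.scala._

// Hypothetical case class mirroring the id/name/age schema from the report.
case class Person(id: String, name: String, age: Int)

object JsonWorkaround {
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val tEnv = TableEnvironment.getTableEnvironment(env)

    // Read the file line by line and parse each line as one JSON object.
    // (A new mapper per record keeps the closure serializable; not efficient,
    // but good enough for a sketch.)
    val persons = env
      .readTextFile("data/in/test.json")
      .map { line =>
        val node = new ObjectMapper().readTree(line)
        Person(node.get("id").asText(), node.get("name").asText(), node.get("age").asInt())
      }

    // Register the DataSet as a table and run the same query as in the report.
    tEnv.registerDataSet("person", persons)
    val result = tEnv.sqlQuery("select * from person")
    result.toDataSet[Person].print()
  }
}
{code}

This bypasses the descriptor/factory discovery entirely, at the cost of hand-written parsing, so it is only a stopgap until the filesystem connector can match a JSON format for batch sources.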
 

  was:
Scala code:
{code:scala}
val connector = FileSystem().path("data/in/test.json")
val desc = tEnv.connect(connector)
  .withFormat(new Json().failOnMissingField(true))
  .registerTableSource("person")

val sql = "select * from person"
val result = tEnv.sqlQuery(sql)
{code}
Exception info:
{code:java}
Exception in thread "main" org.apache.flink.table.api.NoMatchingTableFactoryException: Could not find a suitable table factory for 'org.apache.flink.table.factories.BatchTableSourceFactory' in the classpath.

Reason: No context matches.

The following properties are requested:
connector.path=file:///Users/batch/test.json
connector.property-version=1
connector.type=filesystem
format.derive-schema=true
format.fail-on-missing-field=true
format.property-version=1
format.type=json

The following factories have been considered:
org.apache.flink.table.sources.CsvBatchTableSourceFactory
org.apache.flink.table.sources.CsvAppendTableSourceFactory
org.apache.flink.table.sinks.CsvBatchTableSinkFactory
org.apache.flink.table.sinks.CsvAppendTableSinkFactory
org.apache.flink.formats.avro.AvroRowFormatFactory
org.apache.flink.formats.json.JsonRowFormatFactory
org.apache.flink.streaming.connectors.kafka.Kafka010TableSourceSinkFactory
org.apache.flink.streaming.connectors.kafka.Kafka09TableSourceSinkFactory

    at org.apache.flink.table.factories.TableFactoryService$.filterByContext(TableFactoryService.scala:214)
    at org.apache.flink.table.factories.TableFactoryService$.findInternal(TableFactoryService.scala:130)
    at org.apache.flink.table.factories.TableFactoryService$.find(TableFactoryService.scala:81)
    at org.apache.flink.table.factories.TableFactoryUtil$.findAndCreateTableSource(TableFactoryUtil.scala:44)
    at org.apache.flink.table.descriptors.ConnectTableDescriptor.registerTableSource(ConnectTableDescriptor.scala:46)
    at com.meitu.mlink.sql.batch.JsonExample.main(JsonExample.java:36){code}
 


> BatchTableSourceFactory support Json Format File
> ------------------------------------------------
>
>                 Key: FLINK-11982
>                 URL: https://issues.apache.org/jira/browse/FLINK-11982
>             Project: Flink
>          Issue Type: Bug
>          Components: Table SQL / Ecosystem
>    Affects Versions: 1.6.4, 1.7.2, 1.8.0
>            Reporter: pingle wang
>            Assignee: frank wang
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 10m
>  Remaining Estimate: 0h
>



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
