xiaolan-bit commented on issue #6297:
URL: https://github.com/apache/hudi/issues/6297#issuecomment-1824010940

   It did not work. The jars currently in the lib directory are listed below:
   flink1.17.1/lib:
   -rw-rw-rw- 1 hadoop hadoop   7304133 Nov 23 10:40 calcite-core-1.29.0.jar
   -rw-r--r-- 1 hadoop hadoop    196491 May 19  2023 flink-cep-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop    542620 May 19  2023 
flink-connector-files-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop    102472 May 19  2023 flink-csv-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop 135975541 May 19  2023 flink-dist-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop    180248 May 19  2023 flink-json-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop  21043319 May 19  2023 flink-scala_2.12-1.17.1.jar
   -rw-rw-rw- 1 hadoop hadoop  39256670 Nov 15 11:21 
flink-shaded-hadoop-3-3.1.1.7.2.8.0-224-9.0.jar
   -rw-rw-rw- 1 hadoop hadoop  51337510 Nov 22 16:35 
flink-sql-connector-hive-3.1.3_2.12-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop  15407424 May 19  2023 
flink-table-api-java-uber-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop  38191226 May 19  2023 
flink-table-planner-loader-1.17.1.jar
   -rw-r--r-- 1 hadoop hadoop   3146210 May 19  2023 
flink-table-runtime-1.17.1.jar
   -rw-rw-rw- 1 hadoop hadoop     31532 Nov 15 15:25 htrace-core-2.04.jar
   -rw-rw-rw- 1 hadoop hadoop   1475955 Nov 15 15:26 
htrace-core-3.1.0-incubating.jar
   -rw-rw-rw- 1 hadoop hadoop   1485102 Nov 15 15:26 
htrace-core4-4.0.1-incubating.jar
   -rw-rw-rw- 1 hadoop hadoop  94889852 Nov 23 16:39 
hudi-flink1.17-bundle-0.14.0.jar
   -rw-r--r-- 1 root   root    36360583 Nov 22 10:51 
hudi-hadoop-mr-bundle-0.12.3.jar
   -rw-r--r-- 1 root   root    36550073 Nov 22 10:51 
hudi-hive-sync-bundle-0.12.3.jar
   -rw-r--r-- 1 hadoop hadoop  30980105 Aug 28 16:07 
iceberg-flink-runtime-1.17-1.3.1.jar
   -rw-r--r-- 1 hadoop hadoop    208006 May 17  2023 log4j-1.2-api-2.17.1.jar
   -rw-r--r-- 1 hadoop hadoop    301872 May 17  2023 log4j-api-2.17.1.jar
   -rw-r--r-- 1 hadoop hadoop   1790452 May 17  2023 log4j-core-2.17.1.jar
   -rw-r--r-- 1 hadoop hadoop     24279 May 17  2023 log4j-slf4j-impl-2.17.1.jar
   -rw-rw-rw- 1 hadoop hadoop    909584 Nov 23 15:13 parquet-avro-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop   2027225 Nov 23 12:46 parquet-column-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop     97186 Nov 23 12:46 parquet-common-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop    849289 Nov 23 12:46 parquet-encoding-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop    726179 Nov 23 12:46 
parquet-format-structures-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop   1004565 Nov 23 12:46 parquet-hadoop-1.13.0.jar
   -rw-rw-rw- 1 hadoop hadoop   2017387 Nov 23 12:46 parquet-jackson-1.13.0.jar
   The commands used are below:
   ./bin/yarn-session.sh -d
   ./bin/sql-client.sh embedded -j lib/hudi-flink1.17-bundle-0.14.0.jar shell
   
   set execution.checkpointing.interval=3sec;
   set sql-client.execution.result-mode = tableau;
   create table flink_hudi_hive (
   uuid STRING PRIMARY KEY NOT ENFORCED,
   name STRING, 
   age INT, 
   ts STRING, 
   `partition` STRING
   ) 
   PARTITIONED BY(`partition`) 
   WITH (
   'connector'='hudi', 
   'path'='hdfs://hdfs-ha/hudi/flink/flink_hudi_hive', 
   'table.type'='COPY_ON_WRITE', 
   'hoodie.datasource.write.recordkey.field'='uuid', 
   'write.precombine.field'='ts', 
   'write.tasks'='1', 
   'write.rate.Limit'='2000', 
   'compaction.tasks'='1', 
   'compaction.async.enabled'='true', 
   'compaction.trigger.strategy'='num_commits', 
   'compaction.delta_commits'='1', 
   'changelog.enabled'='true', 
   'read.streaming.check-interval'='3', 
   'hive_sync.enable'='true', 
   'hive_sync.mode'='hms', 
   'hive_sync.metastore.uris'='thrift://12345-az1-master-1-1:9083', 
   
'hive_sync.jdbc_url'='jdbc:hive2://12345-az1-master-1-1:10001/default;transportMode=http;httpPath=cliservice',
 
   'hive_sync.table'='flink_hudi_hive', 
   'hive_sync.db'='default',
   'hive_sync.support_timestamp'='true'
   );
   INSERT INTO flink_hudi_hive VALUES ('id1', 'Tom', 25, '1970-01-01 00:00:03', 
'par1'), ('id2', 'Jerry', 30, '1970-01-01 00:00:04', 'par1'), ('id3', 'Tom', 
25, '1970-01-01 00:00:03', 'par2'), ('id4', 'Jerry', 30, '1970-01-01 00:00:04', 
'par2'), ('id5', 'Spike', 35, '1970-01-01 00:00:05', 'par3'), ('id6', 'Tyke', 
40, '1970-01-01 00:00:06', 'par4'), ('id7', 'Butch', 45, '1970-01-01 00:00:07', 
'par4');
   
   When I enter: SELECT * FROM flink_hudi_hive;
   the following error appears:
   2023-11-23 16:45:48
   java.lang.LinkageError: org/apache/parquet/column/ColumnDescriptor
        at 
org.apache.flink.formats.parquet.vector.reader.AbstractColumnReader.<init>(AbstractColumnReader.java:108)
        at 
org.apache.flink.formats.parquet.vector.reader.BytesColumnReader.<init>(BytesColumnReader.java:35)
        at 
org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createColumnReader(ParquetSplitReaderUtil.java:364)
        at 
org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createColumnReader(ParquetSplitReaderUtil.java:329)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.readNextRowGroup(ParquetColumnarRowSplitReader.java:334)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.nextBatch(ParquetColumnarRowSplitReader.java:310)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.ensureBatch(ParquetColumnarRowSplitReader.java:292)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.reachedEnd(ParquetColumnarRowSplitReader.java:271)
        at 
org.apache.hudi.table.format.ParquetSplitRecordIterator.hasNext(ParquetSplitRecordIterator.java:42)
        at 
org.apache.hudi.table.format.cow.CopyOnWriteInputFormat.reachedEnd(CopyOnWriteInputFormat.java:283)
        at 
org.apache.flink.streaming.api.functions.source.InputFormatSourceFunction.run(InputFormatSourceFunction.java:89)
        at 
org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:110)
        at 
org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:67)
        at 
org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:333)
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to