openinx opened a new issue #2535:
URL: https://github.com/apache/iceberg/issues/2535
When I try to create a Hive external table that reads an Iceberg table hosted in Aliyun OSS and the Aliyun DLF catalog service, using the following SQL, I hit the exception below:
```sql
CREATE EXTERNAL TABLE dlf_db.spark_test
STORED BY 'org.apache.iceberg.mr.hive.HiveIcebergStorageHandler'
TBLPROPERTIES (
'iceberg.catalog'='dlf_catalog',
'iceberg.catalog.dlf_catalog.type'='dlf',
'iceberg.catalog.dlf_catalog.io-impl'='org.apache.iceberg.aliyun.oss.OSSFileIO',
'iceberg.catalog.dlf_catalog.oss.endpoint'='*****************************',
'iceberg.catalog.dlf_catalog.access.key.id'='*****************************',
'iceberg.catalog.dlf_catalog.access.key.secret'='*****************************',
'iceberg.catalog.dlf_catalog.catalog-impl'='org.apache.iceberg.aliyun.dlf.DlfCatalog',
'iceberg.catalog.dlf_catalog.warehouse'='oss://iceberg-test/warehouse',
'iceberg.catalog.dlf_catalog.dlf.catalog-id'='*****************************',
'iceberg.catalog.dlf_catalog.dlf.endpoint'='*****************************',
'iceberg.catalog.dlf_catalog.dlf.region-id'='*****************************'
);
```
The stacktrace:
```
2021-04-28T10:37:07,913 WARN [e1741bc4-1b6e-43cd-87f1-32b51d7b9ee0 main] hadoop.HadoopTableOperations: Error reading version hint file oss://iceberg-test/warehouse/dlf_db.db/small/metadata/version-hint.text
java.io.FileNotFoundException: oss://iceberg-test/warehouse/dlf_db.db/small/metadata/version-hint.text: No such file or directory!
    at org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem.getFileStatus(AliyunOSSFileSystem.java:278) ~[hadoop-aliyun-2.9.2.jar:?]
    at org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem.open(AliyunOSSFileSystem.java:578) ~[hadoop-aliyun-2.9.2.jar:?]
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:914) ~[hadoop-common-2.9.2.jar:?]
    at org.apache.iceberg.hadoop.HadoopTableOperations.findVersion(HadoopTableOperations.java:316) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.hadoop.HadoopTableOperations.refresh(HadoopTableOperations.java:99) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.hadoop.HadoopTableOperations.current(HadoopTableOperations.java:80) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.hadoop.HadoopTables.load(HadoopTables.java:86) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.mr.Catalogs.loadTable(Catalogs.java:110) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.mr.Catalogs.loadTable(Catalogs.java:96) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.iceberg.mr.hive.HiveIcebergSerDe.initialize(HiveIcebergSerDe.java:82) ~[iceberg-hive-runtime-8a20cc3.jar:?]
    at org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:54) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:533) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:450) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:437) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:281) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:263) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:641) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:624) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:831) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:867) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4356) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:354) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2183) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1839) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1526) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227) ~[hive-exec-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233) ~[hive-cli-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184) ~[hive-cli-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403) ~[hive-cli-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:821) ~[hive-cli-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759) ~[hive-cli-2.3.6.jar:2.3.6]
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686) ~[hive-cli-2.3.6.jar:2.3.6]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_221]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_221]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_221]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_221]
    at org.apache.hadoop.util.RunJar.run(RunJar.java:244) ~[hadoop-common-2.9.2.jar:?]
    at org.apache.hadoop.util.RunJar.main(RunJar.java:158) ~[hadoop-common-2.9.2.jar:?]
```
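For what it's worth, the trace shows the load going through `org.apache.iceberg.mr.Catalogs.loadTable` into `org.apache.iceberg.hadoop.HadoopTables.load` (which is what looks for `version-hint.text`), rather than through the configured DLF catalog. As a point of comparison, a direct catalog load roughly equivalent to the TBLPROPERTIES above would look like the sketch below. This is only a sketch: the starred values stand for the same placeholders as in the DDL, the `DlfCatalog` class name is copied from `catalog-impl`, and I'm assuming the catalog property keys are the suffixes after the `iceberg.catalog.dlf_catalog.` prefix.

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;

public class DlfCatalogLoadCheck {
  public static void main(String[] args) {
    // Catalog properties: assumed to be the suffixes of the Hive table
    // properties after 'iceberg.catalog.dlf_catalog.', with the same
    // placeholder values as in the DDL above.
    Map<String, String> props = new HashMap<>();
    props.put("io-impl", "org.apache.iceberg.aliyun.oss.OSSFileIO");
    props.put("warehouse", "oss://iceberg-test/warehouse");
    props.put("oss.endpoint", "*****************************");
    props.put("access.key.id", "*****************************");
    props.put("access.key.secret", "*****************************");
    props.put("dlf.catalog-id", "*****************************");
    props.put("dlf.endpoint", "*****************************");
    props.put("dlf.region-id", "*****************************");

    // Load the catalog implementation named in 'catalog-impl' directly,
    // bypassing Hive, to check whether the DLF catalog itself can see the table.
    Catalog catalog = CatalogUtil.loadCatalog(
        "org.apache.iceberg.aliyun.dlf.DlfCatalog",  // catalog-impl from the DDL
        "dlf_catalog",
        props,
        new Configuration());

    Table table = catalog.loadTable(TableIdentifier.of("dlf_db", "spark_test"));
    System.out.println(table.location());
  }
}
```

If a direct load like this works while the Hive DDL still falls back to `HadoopTables`, the problem is probably in how the `iceberg.catalog` table property is being resolved on the Hive side.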