[ https://issues.apache.org/jira/browse/HDDS-8276?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17704716#comment-17704716 ]

George Huang commented on HDDS-8276:
------------------------------------

use tpcds_bin_partitioned_orc_1000_ozone_with_ec;
describe formatted time_dim;
+-----------------------------+------------------------------------------------+---------------------+
|col_name|data_type|comment|
+-----------------------------+------------------------------------------------+---------------------+
|t_time_sk|bigint| |
|t_time_id|char(16)| |
|t_time|int| |
|t_hour|int| |
|t_minute|int| |
|t_second|int| |
|t_am_pm|char(2)| |
|t_shift|char(20)| |
|t_sub_shift|char(20)| |
|t_meal_time|char(20)| |
| |NULL|NULL|
| # Detailed Table Information|NULL|NULL|
|Database:|tpcds_bin_partitioned_orc_1000_ozone_with_ec|NULL|
|OwnerType:|USER|NULL|
|Owner:|systest|NULL|
|CreateTime:|Wed Mar 22 22:11:23 PDT 2023|NULL|
|LastAccessTime:|UNKNOWN|NULL|
|Retention:|0|NULL|
|Location:|ofs://ozone1/vol9/rs63-fso1/managed/hive/time_dim|NULL|
|Table Type:|MANAGED_TABLE|NULL|
|Table Parameters:|NULL|NULL|
| |bucketing_version|2|
| |numFiles|1|
| |numFilesErasureCoded|1|
| |totalSize|128186|
| |transactional|true|
| |transactional_properties|default|
| |transient_lastDdlTime|1679548283|
| |NULL|NULL|
| # Storage Information|NULL|NULL|
|SerDe Library:|org.apache.hadoop.hive.ql.io.orc.OrcSerde|NULL|
|InputFormat:|org.apache.hadoop.hive.ql.io.orc.OrcInputFormat|NULL|
|OutputFormat:|org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat|NULL|
|Compressed:|No|NULL|
|Num Buckets:|-1|NULL|
|Bucket Columns:|[]|NULL|
|Sort Columns:|[]|NULL|
+-----------------------------+------------------------------------------------+---------------------+
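
The describe output above shows the CTAS target table does exist and contains exactly one erasure-coded file (numFiles=1, numFilesErasureCoded=1, totalSize=128186) under the bucket location, so the write itself appears to have completed and the failure seems limited to the checksum request Hive makes afterwards. Below is a minimal sketch for exercising that call directly through the Hadoop FileSystem API, outside Hive; the delta file path is copied from the exception in the description, and the bare Configuration is an assumption (a real run would need the cluster's client configuration and Kerberos login):

{code:java}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class EcChecksumRepro {
  public static void main(String[] args) throws Exception {
    // Delta file reported in the exception; adjust if the table was recreated.
    Path file = new Path(
        "ofs://ozone1/vol9/rs63-fso1/managed/hive/time_dim/"
            + "delta_0000001_0000001_0000/bucket_00000");

    // Assumption: client configs (core-site.xml, ozone-site.xml) are on the
    // classpath so the ofs:// scheme resolves to this cluster.
    Configuration conf = new Configuration();
    try (FileSystem fs = FileSystem.get(file.toUri(), conf)) {
      // This is the same kind of call Hive issues while writing the
      // notification log entry; on the EC file it is expected to fail with
      // the "Fail to get block checksum" PathIOException quoted below.
      FileChecksum checksum = fs.getFileChecksum(file);
      System.out.println("checksum: " + checksum);
    }
  }
}
{code}

If the standalone call fails the same way, the problem can be narrowed to Ozone's EC block-checksum computation (COMPOSITE_CRC combine mode) rather than to anything Hive-specific.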

> Hive managed table creation failed with org.apache.hadoop.fs.PathIOException: 
> Fail to get block checksum 
> ---------------------------------------------------------------------------------------------------------
>
>                 Key: HDDS-8276
>                 URL: https://issues.apache.org/jira/browse/HDDS-8276
>             Project: Apache Ozone
>          Issue Type: Bug
>          Components: EC
>            Reporter: George Huang
>            Priority: Major
>
> SQL Query:
> {code:java}
> create table time_dim
> stored as orc
> as select * from tpcds_text_1000_ozone_with_ec.time_dim{code}
> Exception:
> {code:java}
> ERROR : Failed org.apache.hadoop.hive.ql.metadata.HiveException: 
> org.apache.hadoop.fs.PathIOException: `Volume: vol9 Bucket: rs63-fso1 
> managed/hive/time_dim/delta_0000001_0000001_0000/bucket_00000': Fail to get 
> block checksum for {blockID={containerID=10089, localID=111677748019473182}, 
> length=128186, offset=0, token=Kind: HDDS_BLOCK_TOKEN, Service: conID: 10089 
> locID: 111677748019473182, Ident: (block_token_identifier (ownerId=hive, 
> expiry=2023-03-24T05:11:23.998Z, certSerialId=19668354085781127, 
> blockId=conID: 10089 locID: 111677748019473182, access modes=[READ], 
> maxLength=128186)), pipeline=Pipeline[ Id: 
> 775c3b7b-0e33-4274-8f6c-1e8eb272d034, Nodes: 
> 97a1a1e2-46cc-4a05-ab6c-303b5ca6404f(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)4f0728d8-41aa-4764-aab5-1c9e33711a5c(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)691c8cfb-7ee9-415f-b1d6-19f58fceb113(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)8ead1f4f-7ebb-43ef-af27-63284d1a6afc(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)8db60085-65af-4289-baf7-b80564debe42(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)1b0a9854-37da-434c-9301-7588bca84be5(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)62bb3414-30b8-43ae-9878-b7187879f5aa(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)95f42f05-534b-458c-b127-d8cdbcd2e0ea(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx)d8969dba-7571-4034-bc13-d3b53e342263(xxxxxx.xxxxx.xxxxxxxx.com/xx.xx.xxx.xx),
>  ReplicationConfig: EC/ECReplicationConfig{data=6, parity=3, 
> ecChunkSize=1048576, codec=rs}, State:OPEN, leaderId:, 
> CreationTimestamp2023-03-22T22:03:20.268-07:00[America/Los_Angeles]], 
> createVersion=0, partNumber=0}, checksum combine mode : {}COMPOSITE_CRC at 
> org.apache.hadoop.hive.ql.metadata.Hive.addWriteNotificationLog(Hive.java:3565)
>  ~[hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.createTableNonReplaceMode(CreateTableOperation.java:166)
>  ~[hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.execute(CreateTableOperation.java:100)
>  ~[hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:82) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:357) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:330) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:246) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Executor.execute(Executor.java:109) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:769) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Driver.run(Driver.java:504) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.Driver.run(Driver.java:498) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166) 
> [hive-exec-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:226)
>  [hive-service-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:88)
>  [hive-service-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:327)
>  [hive-service-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> java.security.AccessController.doPrivileged(Native Method) ~[?:?] at 
> javax.security.auth.Subject.doAs(Subject.java:423) [?:?] at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1898)
>  [hadoop-common-3.1.1.7.1.8.11-3.jar:?] at 
> org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:345)
>  [hive-service-3.1.3000.7.1.8.11-3.jar:3.1.3000.7.1.8.11-3] at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [?:?] 
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?] at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [?:?] 
> at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?] at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
>  [?:?] at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
>  [?:?] at java.lang.Thread.run(Thread.java:834) [?:?]{code}
>  
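
The stack trace indicates the data write and the CREATE TABLE both succeeded, and the exception only surfaces from Hive.addWriteNotificationLog, when the client asks Ozone for a file checksum of the EC-replicated delta file (ECReplicationConfig rs 6+3, ecChunkSize=1048576) with combine mode COMPOSITE_CRC. If that is the case, the file content itself should still be readable even though the checksum request fails. Here is a small diagnostic sketch of that assumption, reusing the path from the exception (not a fix, just a way to separate the read path from the checksum path):

{code:java}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class EcReadVsChecksum {
  public static void main(String[] args) throws Exception {
    Path file = new Path(
        "ofs://ozone1/vol9/rs63-fso1/managed/hive/time_dim/"
            + "delta_0000001_0000001_0000/bucket_00000");
    Configuration conf = new Configuration();
    try (FileSystem fs = FileSystem.get(file.toUri(), conf)) {
      // 1) Plain read of the EC file: expected to succeed if only the
      //    block-checksum path is affected.
      long bytes = 0;
      byte[] buf = new byte[64 * 1024];
      try (FSDataInputStream in = fs.open(file)) {
        int n;
        while ((n = in.read(buf)) > 0) {
          bytes += n;
        }
      }
      System.out.println("read " + bytes + " bytes (totalSize above was 128186)");

      // 2) Checksum of the same file: expected to reproduce the
      //    "Fail to get block checksum" PathIOException shown above.
      System.out.println("checksum: " + fs.getFileChecksum(file));
    }
  }
}
{code}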


