[ https://issues.apache.org/jira/browse/HIVE-20105?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16534832#comment-16534832 ]

Dileep Kumar Chiguruvada commented on HIVE-20105:
-------------------------------------------------

Table DDL:
{code}
drop table if exists misc_store_sales_denormalized_subset;
SET hive.ctas.external.tables=true;

CREATE EXTERNAL TABLE misc_store_sales_denormalized_subset
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
TBLPROPERTIES ("external.table.purge" = "true") AS
SELECT CAST(d_date AS TIMESTAMP) AS `__time`,
       ss_ticket_number,
       ss_quantity,
       CAST(ss_wholesale_cost AS DOUBLE) AS `ss_wholesale_cost`,
       CAST(ss_list_price AS DOUBLE) AS `ss_list_price`,
       CAST(ss_sales_price AS DOUBLE) AS `ss_sales_price`,
       CAST(ss_ext_discount_amt AS DOUBLE) AS `ss_ext_discount_amt`,
       CAST(ss_ext_sales_price AS DOUBLE) AS `ss_ext_sales_price`,
       CAST(ss_ext_wholesale_cost AS DOUBLE) AS `ss_ext_wholesale_cost`,
       CAST(ss_ext_list_price AS DOUBLE) AS `ss_ext_list_price`,
       CAST(ss_ext_tax AS DOUBLE) AS `ss_ext_tax`,
       CAST(ss_coupon_amt AS DOUBLE) AS `ss_coupon_amt`,
       CAST(ss_net_paid AS DOUBLE) AS `ss_net_paid`,
       CAST(ss_net_paid_inc_tax AS DOUBLE) AS `ss_net_paid_inc_tax`,
       CAST(ss_net_profit AS DOUBLE) AS `ss_net_profit`,
       i_item_id,
       i_rec_start_date,
       i_rec_end_date,
       i_item_desc,
       CAST(i_current_price AS DOUBLE) AS `i_current_price`,
       CAST(i_wholesale_cost AS DOUBLE) AS `i_wholesale_cost`,
       CAST(i_brand_id AS STRING) AS `i_brand_id`,
       i_brand,
       CAST(i_class_id AS STRING) AS `i_class_id`,
       i_class,
       CAST(i_category_id AS STRING) AS `i_category_id`,
       i_category,
       CAST(i_manufact_id AS STRING) AS `i_manufact_id`,
       i_manufact,
       i_size,
       i_formulation,
       i_color,
       i_units,
       i_container,
       CAST(i_manager_id AS STRING) AS `i_manager_id`,
       i_product_name,
       c_customer_id,
       c_salutation,
       c_first_name,
       c_last_name,
       c_preferred_cust_flag,
       CAST(c_birth_day AS STRING) AS `c_birth_day`,
       CAST(c_birth_month AS STRING) AS `c_birth_month`,
       CAST(c_birth_year AS STRING) AS `c_birth_year`,
       c_birth_country,
       c_login,
       c_email_address,
       c_last_review_date,
       ca_address_id,
       ca_street_number,
       ca_street_name,
       ca_street_type,
       ca_suite_number,
       ca_city,
       ca_county,
       ca_state,
       ca_zip,
       ca_country,
       CAST(ca_gmt_offset AS STRING) AS `ca_gmt_offset`,
       ca_location_type,
       s_store_id,
       s_rec_start_date,
       s_rec_end_date,
       s_store_name,
       CAST(s_number_employees AS BIGINT) AS `s_number_employees`,
       CAST(s_floor_space AS BIGINT) AS `s_floor_space`,
       s_hours,
       s_manager,
       CAST(s_market_id AS STRING) AS `s_market_id`,
       s_geography_class,
       s_market_desc,
       s_market_manager,
       CAST(s_division_id AS STRING) AS `s_division_id`,
       s_division_name,
       CAST(s_company_id AS STRING) AS `s_company_id`,
       s_company_name,
       s_street_number,
       s_street_name,
       s_street_type,
       s_suite_number,
       s_city,
       s_county,
       s_state,
       s_zip,
       s_country,
       CAST(s_gmt_offset AS STRING) AS `s_gmt_offset`,
       CAST(s_tax_precentage AS DOUBLE) AS `s_tax_precentage`
FROM date_dim,
     store_sales,
     item,
     customer,
     customer_address,
     store
WHERE d_date_sk = ss_sold_date_sk
   AND ss_item_sk = i_item_sk
   AND ss_customer_sk = c_customer_sk 
   AND c_current_addr_sk = ca_address_sk
   AND ss_store_sk = s_store_sk
   AND i_manager_id=7
   AND EXTRACT(MONTH FROM CAST(d_date AS TIMESTAMP))=11
   AND EXTRACT(YEAR FROM CAST(d_date AS TIMESTAMP))=1999
   AND substr(ca_zip,1,5) <> substr(s_zip,1,5);
{code}
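
To double-check how the CTAS typed the `__time` column and which storage handler the table picked up (a generic verification step, not part of the original repro), one can run:
{code}
DESCRIBE FORMATTED misc_store_sales_denormalized_subset;
{code}
The output should show the Druid storage handler in the table parameters and the type recorded for `__time`.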



> Druid-Hive: tpcds query on timestamp throws 
> java.lang.IllegalArgumentException: Cannot create timestamp, parsing error
> ----------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-20105
>                 URL: https://issues.apache.org/jira/browse/HIVE-20105
>             Project: Hive
>          Issue Type: Bug
>          Components: Hive
>    Affects Versions: 3.0.0
>            Reporter: Dileep Kumar Chiguruvada
>            Assignee: Nishant Bangarwa
>            Priority: Major
>             Fix For: 3.0.0
>
>
> Druid-Hive: tpcds query on timestamp throws 
> java.lang.IllegalArgumentException: Cannot create timestamp, parsing error.
> {code}
> SELECT `__time`, max(ss_quantity), sum(ss_wholesale_cost)
>                           FROM misc_store_sales_denormalized_subset
>                           GROUP BY `__time`;
> INFO  : Compiling 
> command(queryId=hive_20180705123007_dd94e295-9e3e-440e-9818-2e7f8458f06d): 
> SELECT `__time`, max(ss_quantity), sum(ss_wholesale_cost)
> FROM misc_store_sales_denormalized_subset
> GROUP BY `__time`
> INFO  : Semantic Analysis Completed (retrial = false)
> INFO  : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, 
> type:timestamp, comment:null), FieldSchema(name:$f1, type:int, comment:null), 
> FieldSchema(name:$f2, type:double, comment:null)], properties:null)
> INFO  : Completed compiling 
> command(queryId=hive_20180705123007_dd94e295-9e3e-440e-9818-2e7f8458f06d); 
> Time taken: 0.143 seconds
> INFO  : Executing 
> command(queryId=hive_20180705123007_dd94e295-9e3e-440e-9818-2e7f8458f06d): 
> SELECT `__time`, max(ss_quantity), sum(ss_wholesale_cost)
> FROM misc_store_sales_denormalized_subset
> GROUP BY `__time`
> INFO  : Completed executing 
> command(queryId=hive_20180705123007_dd94e295-9e3e-440e-9818-2e7f8458f06d); 
> Time taken: 0.003 seconds
> INFO  : OK
> org.apache.hive.service.cli.HiveSQLException: java.io.IOException: 
> java.lang.IllegalArgumentException: Cannot create timestamp, parsing error
> Closing: 0: 
> jdbc:hive2://ctr-e138-1518143905142-397384-01-000006.hwx.site:2181,ctr-e138-1518143905142-397384-01-000005.hwx.site:2181,ctr-e138-1518143905142-397384-01-000004.hwx.site:2181,ctr-e138-1518143905142-397384-01-000007.hwx.site:2181,ctr-e138-1518143905142-397384-01-000008.hwx.site:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2-interactive;principal=hive/[email protected]
> {code}
> Facing this issue after removing the cast to TIMESTAMP WITH LOCAL TIME ZONE when creating the Druid Hive table, i.e. after the change
> {code}
> -SELECT CAST(d_date AS TIMESTAMP with local time zone) AS `__time`,
> +SELECT CAST(d_date AS TIMESTAMP) AS `__time`,
> {code}
> Creating the table with
> SELECT CAST(d_date AS TIMESTAMP WITH LOCAL TIME ZONE) AS `__time` works fine.
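> For comparison, here is a trimmed sketch of the variant reported to work, with the column list shortened for brevity (the actual working DDL presumably used the full column list from the CTAS above; only the `__time` cast differs):
> {code}
> CREATE EXTERNAL TABLE misc_store_sales_denormalized_subset
> STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
> TBLPROPERTIES ("external.table.purge" = "true") AS
> SELECT CAST(d_date AS TIMESTAMP WITH LOCAL TIME ZONE) AS `__time`,
>        ss_ticket_number,
>        ss_quantity,
>        CAST(ss_wholesale_cost AS DOUBLE) AS `ss_wholesale_cost`
> FROM date_dim, store_sales
> WHERE d_date_sk = ss_sold_date_sk;
> {code}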
> HSI log:
> {code}
> 2018-07-05T12:30:08,297 INFO  [6b9ca95f-3aee-44cc-b2eb-2aa9bdec2b38 
> HiveServer2-Handler-Pool: Thread-326]: session.SessionState 
> (SessionState.java:resetThreadName(449)) - Resetting thread name to  
> HiveServer2-Handler-Pool: Thread-326
> 2018-07-05T12:30:08,297 WARN  [HiveServer2-Handler-Pool: Thread-326]: 
> thrift.ThriftCLIService (ThriftCLIService.java:FetchResults(795)) - Error 
> fetching results:
> org.apache.hive.service.cli.HiveSQLException: java.io.IOException: 
> java.lang.IllegalArgumentException: Cannot create timestamp, parsing error
>         at 
> org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:465)
>  ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.operation.OperationManager.getOperationNextRowSet(OperationManager.java:309)
>  ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.session.HiveSessionImpl.fetchResults(HiveSessionImpl.java:905)
>  ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.CLIService.fetchResults(CLIService.java:561) 
> ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.thrift.ThriftCLIService.FetchResults(ThriftCLIService.java:786)
>  [hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1837)
>  [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1822)
>  [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39) 
> [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) 
> [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingProcessor.process(HadoopThriftAuthBridge.java:647)
>  [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
>  [hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>  [?:1.8.0_112]
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>  [?:1.8.0_112]
>         at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
> Caused by: java.io.IOException: java.lang.IllegalArgumentException: Cannot 
> create timestamp, parsing error
>         at 
> org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:602)
>  ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:509) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:146) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:2695) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.getResults(ReExecDriver.java:229)
>  ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:460)
>  ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         ... 13 more
> Caused by: java.lang.IllegalArgumentException: Cannot create timestamp, 
> parsing error
>         at 
> org.apache.hadoop.hive.common.type.Timestamp.valueOf(Timestamp.java:163) 
> ~[hive-common-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.druid.serde.DruidSerDe.deserialize(DruidSerDe.java:403)
>  ~[hive-druid-handler-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:583)
>  ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:509) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:146) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:2695) 
> ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.getResults(ReExecDriver.java:229)
>  ~[hive-exec-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         at 
> org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:460)
>  ~[hive-service-3.1.0.3.0.0.0-1602.jar:3.1.0.3.0.0.0-1602]
>         ... 13 more
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)