When I read Hive data with Spark SQL (Spark 2.2.0), I get the following exception:
org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(pid#1, 200)
+- *HashAggregate(keys=[pid#1], functions=[partial_sum(expnum#0L), partial_sum(outnum#2L)], output=[pid#1, sum#655L, sum#656L])
   +- *Project [expnum#0L, pid#1, outnum#2L]
      +- SortMergeJoin [deal_id#28, partitiontime#57], [deal_id#165, partitiontime#221], LeftOuter
         :- *Sort [deal_id#28 ASC NULLS FIRST, partitiontime#57 ASC NULLS FIRST], false, 0
         :  +- Exchange hashpartitioning(deal_id#28, partitiontime#57, 200)
         :     +- *Project [partitiontime#57, deal_id#28, expnum#0L, pid#1]
         :        +- *SortMergeJoin [cast(deal_id#28 as int)], [id#60], Inner
         :           :- *Sort [cast(deal_id#28 as int) ASC NULLS FIRST], false, 0
         :           :  +- Exchange hashpartitioning(cast(deal_id#28 as int), 200)
         :           :     +- *HashAggregate(keys=[partitiontime#57, deal_id#28], functions=[count(1)], output=[partitiontime#57, deal_id#28, expnum#0L])
         :           :        +- Exchange hashpartitioning(partitiontime#57, deal_id#28, 200)
         :           :           +- *HashAggregate(keys=[partitiontime#57, deal_id#28], functions=[partial_count(1)], output=[partitiontime#57, deal_id#28, count#658L])
         :           :              +- *Project [deal_id#28, partitiontime#57]
         :           :                 +- *Filter (((((isnotnull(from_source#15) && isnotnull(pos_type#10)) && (from_source#15 = 2)) && (pos_type#10 = home)) && platform#16 IN (Android,android,iphone,iPhone)) && isnotnull(deal_id#28))
         :           :                    +- HiveTableScan [platform#16, deal_id#28, from_source#15, partitiontime#57, pos_type#10], CatalogRelation `default`.`exposure`, org.apache.hadoop.hive.ql.io.orc.OrcSerde, [ip#8, log_time#9, pos_type#10, pos_value#11, user_id#12, device_id#13, cookie_id#14, from_source#15, platform#16, version#17, channel#18, c_detail#19, user_role#20, user_type#21, school#22, child#23, list_version#24, tags#25, url#26, refer#27, deal_id#28, deal_n#29, deal_x#30, deal_y#31, ... 24 more fields], [partitionmonth#56, partitiontime#57, partitionfromsource#58, hour#59], [isnotnull(partitiontime#57), (partitiontime#57 = 20171130)]
         :           +- *Sort [id#60 ASC NULLS FIRST], false, 0
         :              +- Exchange hashpartitioning(id#60, 200)
         :                 +- *Project [id#60, CASE WHEN (cast(source_type#131 as int) = 1) THEN zid#132 WHEN (cast(source_type#131 as int) = 0) THEN taobao_id#77 ELSE cast(id#60 as string) END AS pid#1]
         :                    +- *Filter (((length(zid#132) > 0) || (length(taobao_id#77) > 0)) && isnotnull(id#60))
         :                       +- HiveTableScan [id#60, source_type#131, zid#132, taobao_id#77], CatalogRelation `default`.`deals`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [id#60, price#61, list_price#62, sales_volume#63, oos#64, priority#65, publish_status#66, referrer#67, tag_id#68, tao_type#69, anhao_count#70, deal_id#71, title#72, url_name#73, recommend_reason#74, wap_url#75, taobao_cps_url#76, taobao_id#77, taobao_nick#78, begin_time#79, end_time#80, created_at#81, jump_type#82, taobao_cid#83, ... 80 more fields]
         +- *Sort [deal_id#165 ASC NULLS FIRST, partitiontime#221 ASC NULLS FIRST], false, 0
            +- Exchange hashpartitioning(deal_id#165, partitiontime#221, 200)
               +- *HashAggregate(keys=[partitiontime#221, deal_id#165], functions=[count(1)], output=[partitiontime#221, deal_id#165, outnum#2L])
                  +- Exchange hashpartitioning(partitiontime#221, deal_id#165, 200)
                     +- *HashAggregate(keys=[partitiontime#221, deal_id#165], functions=[partial_count(1)], output=[partitiontime#221, deal_id#165, count#660L])
                        +- *Project [deal_id#165, partitiontime#221]
                           +- *Filter ((((isnotnull(jump_source#164) && isnotnull(pos_type#188)) && (jump_source#164 = 2)) && (pos_type#188 = home)) && platform#169 IN (Android,android,iphone,iPhone))
                              +- HiveTableScan [platform#169, pos_type#188, jump_source#164, partitiontime#221, deal_id#165], CatalogRelation `default`.`out`, org.apache.hadoop.hive.ql.io.orc.OrcSerde, [jump_source#164, deal_id#165, create_time#166, tao800_user_id#167, source#168, platform#169, device_id#170, version#171, channel_id#172, c_type#173, c_id#174, school_code#175, share_type#176, m_id#177, utm_source#178, utm_medium#179, utm_term#180, utm_content#181, utm_campaign#182, utm_entrance#183, session_id#184, ju_version#185, ga_user_id#186, brand_version#187, ... 31 more fields], [out_type#219, partitionmonth#220, partitiontime#221], [isnotnull(partitiontime#221), (partitiontime#221 = 20171130)]
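For context, the failing statement is roughly shaped like the sketch below. This is a reconstruction from the plan above, not the verbatim query: the table and column names (default.exposure, default.deals, default.out, pid, expnum, outnum, deal_id, partitiontime, ...) come from the plan, while the session setup and the exact SQL text are assumptions.

import org.apache.spark.sql.SparkSession

// A Hive-enabled session is assumed; appName is hypothetical.
val spark = SparkSession.builder()
  .appName("exposure-out-join")
  .enableHiveSupport()
  .getOrCreate()

// Per-deal exposure counts, inner-joined to deals to derive pid,
// left-outer-joined to per-deal out counts, then summed per pid.
val df = spark.sql("""
  SELECT d.pid,
         SUM(e.expnum) AS expnum,
         SUM(o.outnum) AS outnum
  FROM (SELECT partitiontime, deal_id, COUNT(1) AS expnum
        FROM default.exposure
        WHERE from_source = 2
          AND pos_type = 'home'
          AND platform IN ('Android', 'android', 'iphone', 'iPhone')
          AND partitiontime = 20171130
        GROUP BY partitiontime, deal_id) e
  JOIN (SELECT id,
               CASE WHEN CAST(source_type AS INT) = 1 THEN zid
                    WHEN CAST(source_type AS INT) = 0 THEN taobao_id
                    ELSE CAST(id AS STRING) END AS pid
        FROM default.deals
        WHERE length(zid) > 0 OR length(taobao_id) > 0) d
    ON CAST(e.deal_id AS INT) = d.id
  LEFT OUTER JOIN (SELECT partitiontime, deal_id, COUNT(1) AS outnum
                   FROM `default`.`out`
                   WHERE jump_source = 2
                     AND pos_type = 'home'
                     AND platform IN ('Android', 'android', 'iphone', 'iPhone')
                     AND partitiontime = 20171130
                   GROUP BY partitiontime, deal_id) o
    ON e.deal_id = o.deal_id AND e.partitiontime = o.partitiontime
  GROUP BY d.pid
""")

Executing an action on this query (for example df.show()) is what triggers the exception above.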
    


