[ https://issues.apache.org/jira/browse/HIVE-10288?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Nezih Yigitbasi updated HIVE-10288:
-----------------------------------
    Description: 
Just pulled trunk and built the Hive binary. If I create a permanent UDF, exit the 
CLI, then open the CLI again and call the UDF, the call fails with the exception 
below. However, the call succeeds if I invoke the UDF right after registering it 
(without exiting the CLI). It also succeeds with the apache-hive-1.0.0 release.

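For reference, a minimal repro sketch along these lines (the jar URI and UDF class 
name are placeholders; only the function name hour_now and the table test_table 
come from the log below):

{code}
-- Session 1: register a permanent UDF (jar URI and class name are hypothetical)
CREATE FUNCTION hour_now AS 'com.example.udf.HourNow'
  USING JAR 'hdfs:///tmp/example-udfs.jar';
SELECT hour_now() FROM test_table LIMIT 1;   -- succeeds in the same session

-- Quit the CLI, then start a new session
SELECT hour_now() FROM test_table LIMIT 1;   -- fails with the NullPointerException below
{code}
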
{code}
2015-04-13 17:04:54,004 INFO  org.apache.hadoop.hive.ql.log.PerfLogger 
(PerfLogger.java:PerfLogEnd(148)) - </PERFLOG method=parse start=1428969893115 
end=1428969894004 duration=889 from=org.apache.hadoop.hive.ql.Driver>
2015-04-13 17:04:54,007 DEBUG org.apache.hadoop.hive.ql.Driver 
(Driver.java:recordValidTxns(939)) - Encoding valid txns info 
9223372036854775807:
2015-04-13 17:04:54,007 INFO  org.apache.hadoop.hive.ql.log.PerfLogger 
(PerfLogger.java:PerfLogBegin(121)) - <PERFLOG method=semanticAnalyze 
from=org.apache.hadoop.hive.ql.Driver>
2015-04-13 17:04:54,052 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:analyzeInternal(9997)) - Starting Semantic Analysis
2015-04-13 17:04:54,053 DEBUG org.apache.hadoop.hive.ql.exec.FunctionRegistry 
(FunctionRegistry.java:getGenericUDAFResolver(942)) - Looking up GenericUDAF: 
hour_now
2015-04-13 17:04:54,053 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genResolvedParseTree(9980)) - Completed phase 1 of 
Semantic Analysis
2015-04-13 17:04:54,053 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1530)) - Get metadata for source tables
2015-04-13 17:04:54,054 INFO  org.apache.hadoop.hive.metastore.HiveMetaStore 
(HiveMetaStore.java:logInfo(744)) - 0: get_table : db=default tbl=test_table
2015-04-13 17:04:54,054 INFO  
org.apache.hadoop.hive.metastore.HiveMetaStore.audit 
(HiveMetaStore.java:logAuditEvent(369)) - ugi=nyigitbasi     ip=unknown-ip-addr 
     cmd=get_table : db=default tbl=test_table
2015-04-13 17:04:54,054 DEBUG org.apache.hadoop.hive.metastore.ObjectStore 
(ObjectStore.java:debugLog(6776)) - Open transaction: count = 1, isActive = 
true at:
        
org.apache.hadoop.hive.metastore.ObjectStore.getTable(ObjectStore.java:927)
2015-04-13 17:04:54,054 DEBUG org.apache.hadoop.hive.metastore.ObjectStore 
(ObjectStore.java:debugLog(6776)) - Open transaction: count = 2, isActive = 
true at:
        
org.apache.hadoop.hive.metastore.ObjectStore.getMTable(ObjectStore.java:990)
2015-04-13 17:04:54,104 DEBUG org.apache.hadoop.hive.metastore.ObjectStore 
(ObjectStore.java:debugLog(6776)) - Commit transaction: count = 1, isactive 
true at:
        
org.apache.hadoop.hive.metastore.ObjectStore.getMTable(ObjectStore.java:998)
2015-04-13 17:04:54,232 DEBUG org.apache.hadoop.hive.metastore.ObjectStore 
(ObjectStore.java:debugLog(6776)) - Commit transaction: count = 0, isactive 
true at:
        
org.apache.hadoop.hive.metastore.ObjectStore.getTable(ObjectStore.java:929)
2015-04-13 17:04:54,242 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1682)) - Get metadata for subqueries
2015-04-13 17:04:54,247 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1706)) - Get metadata for destination tables
2015-04-13 17:04:54,256 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genResolvedParseTree(9984)) - Completed getting MetaData 
in Semantic Analysis
2015-04-13 17:04:54,259 INFO  
org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer 
(CalcitePlanner.java:canHandleAstForCbo(369)) - Not invoking CBO because the 
statement has too few joins
2015-04-13 17:04:54,344 DEBUG 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
(LazySimpleSerDe.java:initialize(135)) - 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe initialized with: 
columnNames=[_c0, _c1] columnTypes=[int, int] separator=[[B@6e6d4780] 
nullstring=\N lastColumnTakesRest=false timestampFormats=null
2015-04-13 17:04:54,406 DEBUG org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genTablePlan(9458)) - Created Table Plan for test_table 
TS[0]
2015-04-13 17:04:54,410 DEBUG org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genBodyPlan(8815)) - RR before GB test_table{(_c0,_c0: 
int)(_c1,_c1: int)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: 
bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: 
struct<transactionid:bigint,bucketid:int,rowid:bigint>)}  after GB 
test_table{(_c0,_c0: int)(_c1,_c1: 
int)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: 
bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: 
struct<transactionid:bigint,bucketid:int,rowid:bigint>)}
2015-04-13 17:04:54,410 DEBUG org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genSelectPlan(3608)) - tree: (TOK_SELECT (TOK_SELEXPR 
(TOK_FUNCTION hour_now)))
2015-04-13 17:04:54,413 DEBUG org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genSelectPlan(3718)) - genSelectPlan: input = 
test_table{(_c0,_c0: int)(_c1,_c1: 
int)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: 
bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: 
struct<transactionid:bigint,bucketid:int,rowid:bigint>)}  starRr = null
2015-04-13 17:04:54,435 ERROR org.apache.hadoop.hive.ql.Driver 
(SessionState.java:printError(958)) - FAILED: NullPointerException null
java.lang.NullPointerException
        at org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc.newInstance(ExprNodeGenericFuncDesc.java:232)
        at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.getXpathOrFuncExprNodeDesc(TypeCheckProcFactory.java:1048)
        at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.process(TypeCheckProcFactory.java:1265)
        at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:90)
        at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:95)
        at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:79)
        at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:133)
        at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:110)
        at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:205)
        at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:149)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genAllExprNodeDesc(SemanticAnalyzer.java:10383)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:10338)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:3815)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:3594)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPostGroupByBodyPlan(SemanticAnalyzer.java:8864)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genBodyPlan(SemanticAnalyzer.java:8819)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:9663)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:9556)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genOPTree(SemanticAnalyzer.java:9992)
        at org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree(CalcitePlanner.java:306)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10003)
        at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:195)
        at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:224)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:424)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
        at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
        at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
        at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
        at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:736)
        at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
        at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:483)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
{code}

  was:
Just pulled trunk and built the Hive binary. If I create a permanent UDF, exit the 
CLI, then open the CLI again and call the UDF, the call fails with the exception 
below. However, the call succeeds if I invoke the UDF right after registering it 
(without exiting the CLI). It also succeeds with the apache-hive-1.0.0 release.

{code}
2015-04-09 23:19:25,485 INFO  hive.ql.parse.ParseDriver 
(ParseDriver.java:parse(185)) - Parsing command: select dateint_today() FROM 
nyigitbasi.all_maps LIMIT 1
2015-04-09 23:19:26,156 INFO  hive.ql.parse.ParseDriver 
(ParseDriver.java:parse(209)) - Parse Completed
2015-04-09 23:19:26,157 INFO  org.apache.hadoop.hive.ql.log.PerfLogger 
(PerfLogger.java:PerfLogEnd(148)) - </PERFLOG method=parse start=1428621565468 
end=1428621566157 duration=689 from=org.apache.hadoop.hive.ql.Driver>
2015-04-09 23:19:26,163 INFO  org.apache.hadoop.hive.ql.log.PerfLogger 
(PerfLogger.java:PerfLogBegin(121)) - <PERFLOG method=semanticAnalyze 
from=org.apache.hadoop.hive.ql.Driver>
2015-04-09 23:19:26,243 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:analyzeInternal(9997)) - Starting Semantic Analysis
2015-04-09 23:19:26,245 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genResolvedParseTree(9980)) - Completed phase 1 of 
Semantic Analysis
2015-04-09 23:19:26,245 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1530)) - Get metadata for source tables
2015-04-09 23:19:26,245 INFO  org.apache.hadoop.hive.metastore.HiveMetaStore 
(HiveMetaStore.java:logInfo(744)) - 0: get_table : db=nyigitbasi tbl=all_maps
2015-04-09 23:19:26,245 INFO  
org.apache.hadoop.hive.metastore.HiveMetaStore.audit 
(HiveMetaStore.java:logAuditEvent(369)) - ugi=dataeng        ip=unknown-ip-addr 
     cmd=get_table : db=nyigitbasi tbl=all_maps
2015-04-09 23:19:26,351 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1682)) - Get metadata for subqueries
2015-04-09 23:19:26,360 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:getMetaData(1706)) - Get metadata for destination tables
2015-04-09 23:19:26,378 INFO  hive.ql.Context 
(Context.java:getMRScratchDir(330)) - New scratch dir is 
hdfs://10.171.119.231:9000/mnt/var/lib/hive/tmp/scratch/dataeng/81446a20-509e-428f-80e4-39c7e294d369/hive_2015-04-09_23-19-25_466_6525794380744094330-1
2015-04-09 23:19:26,381 INFO  org.apache.hadoop.hive.ql.parse.CalcitePlanner 
(SemanticAnalyzer.java:genResolvedParseTree(9984)) - Completed getting MetaData 
in Semantic Analysis
2015-04-09 23:19:26,388 INFO  
org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer 
(CalcitePlanner.java:canHandleAstForCbo(369)) - Not invoking CBO because the 
statement has too few joins
2015-04-09 23:19:26,720 ERROR org.apache.hadoop.hive.ql.Driver 
(SessionState.java:printError(958)) - FAILED: NullPointerException null
java.lang.NullPointerException
        at 
org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc.newInstance(ExprNodeGenericFuncDesc.java:232)
        at 
org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.getXpathOrFuncExprNodeDesc(TypeCheckProcFactory.java:1048)
        at 
org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.process(TypeCheckProcFactory.java:1265)
        at 
org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:90)
        at 
org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:94)
        at 
org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:78)
        at 
org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:132)
        at 
org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:109)
        at 
org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:205)
        at 
org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:149)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genAllExprNodeDesc(SemanticAnalyzer.java:10383)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:10338)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:3815)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:3594)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPostGroupByBodyPlan(SemanticAnalyzer.java:8864)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genBodyPlan(SemanticAnalyzer.java:8819)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:9663)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:9556)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genOPTree(SemanticAnalyzer.java:9992)
        at 
org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree(CalcitePlanner.java:306)
        at 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10003)
        at 
org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:195)
        at 
org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:224)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:424)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
        at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
        at 
org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
        at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
        at 
org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:736)
        at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
        at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
{code}


> Cannot call permanent UDFs
> --------------------------
>
>                 Key: HIVE-10288
>                 URL: https://issues.apache.org/jira/browse/HIVE-10288
>             Project: Hive
>          Issue Type: Bug
>    Affects Versions: 1.2.0
>            Reporter: Nezih Yigitbasi
>



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
