[ https://issues.apache.org/jira/browse/HDDS-1430?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16815855#comment-16815855 ]

Ajay Kumar commented on HDDS-1430:
----------------------------------

{code}Exception in thread "main" java.lang.NullPointerException
        at org.apache.hadoop.crypto.key.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:660)
        at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:96)
        at org.apache.hadoop.util.KMSUtil.createKeyProviderFromUri(KMSUtil.java:83)
        at org.apache.hadoop.ozone.client.rpc.OzoneKMSUtil.getKeyProvider(OzoneKMSUtil.java:131)
        at org.apache.hadoop.ozone.client.rpc.RpcClient.getKeyProvider(RpcClient.java:979)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.ozone.client.OzoneClientInvocationHandler.invoke(OzoneClientInvocationHandler.java:54)
        at com.sun.proxy.$Proxy17.getKeyProvider(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.hdds.tracing.TraceAllMethod.invoke(TraceAllMethod.java:66)
        at com.sun.proxy.$Proxy17.getKeyProvider(Unknown Source)
        at org.apache.hadoop.ozone.client.ObjectStore.getKeyProvider(ObjectStore.java:266)
        at org.apache.hadoop.fs.ozone.BasicOzoneClientAdapterImpl.getKeyProvider(BasicOzoneClientAdapterImpl.java:281)
        at org.apache.hadoop.fs.ozone.OzoneFileSystem.getKeyProvider(OzoneFileSystem.java:51)
        at org.apache.hadoop.fs.ozone.OzoneFileSystem.getAdditionalTokenIssuers(OzoneFileSystem.java:62)
        at org.apache.hadoop.security.token.DelegationTokenIssuer.collectDelegationTokens(DelegationTokenIssuer.java:104)
        at org.apache.hadoop.security.token.DelegationTokenIssuer.addDelegationTokens(DelegationTokenIssuer.java:76)
        at org.apache.spark.deploy.security.HadoopFSDelegationTokenProvider$$anonfun$org$apache$spark$deploy$security$HadoopFSDelegationTokenProvider$$fetchDelegationTokens$1.apply(HadoopFSDelegationTokenProvider.scala:98)
        at org.apache.spark.deploy.security.HadoopFSDelegationTokenProvider$$anonfun$org$apache$spark$deploy$security$HadoopFSDelegationTokenProvider$$fetchDelegationTokens$1.apply(HadoopFSDelegationTokenProvider.scala:96)
        at scala.collection.immutable.Set$Set1.foreach(Set.scala:94)
        at org.apache.spark.deploy.security.HadoopFSDelegationTokenProvider.org$apache$spark$deploy$security$HadoopFSDelegationTokenProvider$$fetchDelegationTokens(HadoopFSDelegationTokenProvider.scala:96)
        at org.apache.spark.deploy.security.HadoopFSDelegationTokenProvider.obtainDelegationTokens(HadoopFSDelegationTokenProvider.scala:49)
        at org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(HadoopDelegationTokenManager.scala:132)
        at org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(HadoopDelegationTokenManager.scala:130)
        at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
        at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
        at scala.collection.Iterator$class.foreach(Iterator.scala:891)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
        at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
        at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
        at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
        at org.apache.spark.deploy.security.HadoopDelegationTokenManager.obtainDelegationTokens(HadoopDelegationTokenManager.scala:130)
        at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager.obtainDelegationTokens(YARNHadoopDelegationTokenManager.scala:59)
        at org.apache.spark.deploy.yarn.Client.setupSecurityToken(Client.scala:309)
        at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1013)
        at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:178)
        at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:57)
        at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:186)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
        at org.apache.spark.examples.DFSReadWriteTest$.main(DFSReadWriteTest.scala:106)
        at org.apache.spark.examples.DFSReadWriteTest.main(DFSReadWriteTest.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala){code}

> NPE in secure Ozone if KMS URI is not defined.
> ----------------------------------------------
>
>                 Key: HDDS-1430
>                 URL: https://issues.apache.org/jira/browse/HDDS-1430
>             Project: Hadoop Distributed Data Store
>          Issue Type: Sub-task
>    Affects Versions: 0.4.0
>            Reporter: Ajay Kumar
>            Assignee: Ajay Kumar
>            Priority: Blocker
>
> OzoneKMSUtil.getKeyProvider throws an NPE if the KMS URI is not defined.


