Shyamala Gowri created BAHIR-316:
------------------------------------

             Summary: Connectivity to Cloudant not working from a Spark notebook on a FIPS-enabled cluster
                 Key: BAHIR-316
                 URL: https://issues.apache.org/jira/browse/BAHIR-316
             Project: Bahir
          Issue Type: Bug
          Components: Spark SQL Data Sources
    Affects Versions: Not Applicable
            Reporter: Shyamala Gowri
             Fix For: Not Applicable


Connectivity to Cloudant could not be established from a Spark notebook on a FIPS-enabled cluster.

 

Code used from the notebook:

```
sqlContext.read.format("org.apache.bahir.cloudant") \
    .option("cloudant.host", "cloudant-host-bluemix.cloudantnosqldb.appdomain.cloud") \
    .option("cloudant.username", "apikey-v2-*********") \
    .option("cloudant.password", "*********************") \
    .load("databasedemo")
```
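
As a sanity check, a TLS handshake to the same host can be attempted directly from the Python kernel (standard-library only; a sketch, using the redacted hostname from above) to rule out the endpoint or the network path:

```
# Diagnostic sketch (not output from the failing job): attempt a plain TLS
# handshake to the Cloudant host from the notebook's Python process.
# The hostname is the redacted one used above; substitute the real instance.
import socket
import ssl

host = "cloudant-host-bluemix.cloudantnosqldb.appdomain.cloud"

context = ssl.create_default_context()
with socket.create_connection((host, 443), timeout=10) as sock:
    with context.wrap_socket(sock, server_hostname=host) as tls:
        # A successful handshake here means the service is reachable and
        # negotiates TLS from this host, so the JVM-side TLS configuration
        # under FIPS mode is the likely source of the failure seen below.
        print("negotiated:", tls.version(), tls.cipher())
```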

 

Running the read shown above throws the following error:

```
Py4JJavaError                             Traceback (most recent call last)
/usr/local/share/jupyter/kernels/python39/scripts/launch_ipykernel.py in <module>
----> 1 sqlContext.read.format("org.apache.bahir.cloudant").option("cloudant.host","b2545557-9e61-4802-a4e8-1535273619bf-bluemix.cloudantnosqldb.appdomain.cloud").option("cloudant.username","apikey-v2-*********").option("cloudant.password","********").load("databasedemo")

/opt/ibm/spark/python/lib/pyspark.zip/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
    175 self.options(**options)
    176 if isinstance(path, str):
--> 177 return self._df(self._jreader.load(path))
    178 elif path is not None:
    179 if type(path) != list:

/opt/ibm/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1319
   1320 answer = self.gateway_client.send_command(command)
-> 1321 return_value = get_return_value(
   1322     answer, self.gateway_client, self.target_id, self.name)
   1323

/opt/ibm/spark/python/lib/pyspark.zip/pyspark/sql/utils.py in deco(*a, **kw)
    188 def deco(*a: Any, **kw: Any) -> Any:
    189 try:
--> 190 return f(*a, **kw)
    191 except Py4JJavaError as e:
    192 converted = convert_exception(e.java_exception)

/opt/ibm/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
    325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
    327     "An error occurred while calling {0}{1}{2}.\n".
    328     format(target_id, ".", name), value)

Py4JJavaError: An error occurred while calling o112.load.
: com.cloudant.client.org.lightcouch.CouchDbException: Error retrieving server response at https://b2545557-9e61-4802-a4e8-1535273619bf-bluemix.cloudantnosqldb.appdomain.cloud/databasedemo/_all_docs?limit=1.
    at com.cloudant.client.org.lightcouch.CouchDbClient.execute(CouchDbClient.java:630)
    at com.cloudant.client.api.CloudantClient.executeRequest(CloudantClient.java:411)
    at org.apache.bahir.cloudant.CloudantConfig.executeRequest(CloudantConfig.scala:73)
    at org.apache.bahir.cloudant.common.JsonStoreDataAccess.getQueryResult(JsonStoreDataAccess.scala:114)
    at org.apache.bahir.cloudant.common.JsonStoreDataAccess.getTotalRows(JsonStoreDataAccess.scala:70)
    at org.apache.bahir.cloudant.common.JsonStoreRDD.getPartitions(JsonStoreRDD.scala:189)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
    at org.apache.spark.sql.execution.SQLExecutionRDD.getPartitions(SQLExecutionRDD.scala:44)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
    at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2323)
    at org.apache.spark.sql.catalyst.json.JsonInferSchema.infer(JsonInferSchema.scala:116)
    at org.apache.spark.sql.execution.datasources.json.TextInputJsonDataSource$.$anonfun$inferFromDataset$5(JsonDataSource.scala:110)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
    at org.apache.spark.sql.execution.datasources.json.TextInputJsonDataSource$.inferFromDataset(JsonDataSource.scala:110)
    at org.apache.spark.sql.DataFrameReader.$anonfun$json$6(DataFrameReader.scala:415)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.sql.DataFrameReader.json(DataFrameReader.scala:415)
    at org.apache.bahir.cloudant.DefaultSource.create(DefaultSource.scala:118)
    at org.apache.bahir.cloudant.DefaultSource.createRelation(DefaultSource.scala:95)
    at org.apache.bahir.cloudant.DefaultSource.createRelation(DefaultSource.scala:87)
    at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:350)
    at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:228)
    at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:210)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:210)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:185)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    at java.base/java.lang.Thread.run(Thread.java:840)
Caused by: javax.net.ssl.SSLHandshakeException: Received fatal alert: handshake_failure
    at java.base/sun.security.ssl.Alert.createSSLException(Alert.java:131)
    at java.base/sun.security.ssl.Alert.createSSLException(Alert.java:117)
    at java.base/sun.security.ssl.TransportContext.fatal(TransportContext.java:340)
    at java.base/sun.security.ssl.Alert$AlertConsumer.consume(Alert.java:293)
    at java.base/sun.security.ssl.TransportContext.dispatch(TransportContext.java:186)
    at java.base/sun.security.ssl.SSLTransport.decode(SSLTransport.java:172)
    at java.base/sun.security.ssl.SSLSocketImpl.decode(SSLSocketImpl.java:1506)
    at java.base/sun.security.ssl.SSLSocketImpl.readHandshakeRecord(SSLSocketImpl.java:1416)
    at java.base/sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:456)
    at java.base/sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:427)
    at java.base/sun.net.www.protocol.https.HttpsClient.afterConnect(HttpsClient.java:572)
    at java.base/sun.net.www.protocol.https.AbstractDelegateHttpsURLConnection.connect(AbstractDelegateHttpsURLConnection.java:201)
    at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1592)
    at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1520)
    at java.base/java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:527)
    at java.base/sun.net.www.protocol.https.HttpsURLConnectionImpl.getResponseCode(HttpsURLConnectionImpl.java:334)
    at com.cloudant.http.interceptors.Replay429Interceptor.interceptResponse(Replay429Interceptor.java:88)
    at com.cloudant.http.HttpConnection.execute(HttpConnection.java:341)
    at com.cloudant.client.org.lightcouch.CouchDbClient.execute(CouchDbClient.java:552)
    ... 53 more
```
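
The root cause is the javax.net.ssl.SSLHandshakeException (handshake_failure) raised while the driver JVM connects to the Cloudant endpoint. The TLS protocols and cipher suites the JVM offers by default can be listed from the same notebook through the py4j gateway (a sketch only; `_sc`/`_jvm` are PySpark internals). A FIPS security policy typically trims this list, and a handshake_failure is expected if it no longer overlaps with what the server accepts:

```
# Diagnostic sketch: inspect the driver JVM's default TLS parameters via py4j
# (sqlContext._sc._jvm is a PySpark internal, used here only for illustration).
jvm = sqlContext._sc._jvm
params = jvm.javax.net.ssl.SSLContext.getDefault().getDefaultSSLParameters()

# Protocols and cipher suites the JVM offers by default; under a FIPS
# provider/policy this list is usually reduced.
print("protocols:    ", list(params.getProtocols()))
print("cipher suites:", list(params.getCipherSuites()))
```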
 
 
 
 



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
