Github user HyukjinKwon commented on a diff in the pull request:
https://github.com/apache/spark/pull/21368#discussion_r189480728
--- Diff: python/pyspark/shell.py ---
@@ -38,25 +41,29 @@
SparkContext._ensure_initialized()
try:
- # Try to access HiveConf, it will raise exception if Hive is not added
- conf = SparkConf()
- if conf.get('spark.sql.catalogImplementation', 'hive').lower() ==
'hive':
- SparkContext._jvm.org.apache.hadoop.hive.conf.HiveConf()
- spark = SparkSession.builder\
- .enableHiveSupport()\
- .getOrCreate()
- else:
+ try:
+ # Try to access HiveConf, it will raise exception if Hive is not
added
+ conf = SparkConf()
+ if conf.get('spark.sql.catalogImplementation', 'hive').lower() ==
'hive':
+ SparkContext._jvm.org.apache.hadoop.hive.conf.HiveConf()
+ spark = SparkSession.builder\
+ .enableHiveSupport()\
+ .getOrCreate()
+ else:
+ spark = SparkSession.builder.getOrCreate()
+ except py4j.protocol.Py4JError:
+ if conf.get('spark.sql.catalogImplementation', '').lower() ==
'hive':
+ warnings.warn("Fall back to non-hive support because failing
to access HiveConf, "
+ "please make sure you build spark with hive")
+ spark = SparkSession.builder.getOrCreate()
+ except TypeError:
+ if conf.get('spark.sql.catalogImplementation', '').lower() ==
'hive':
+ warnings.warn("Fall back to non-hive support because failing
to access HiveConf, "
+ "please make sure you build spark with hive")
spark = SparkSession.builder.getOrCreate()
-except py4j.protocol.Py4JError:
- if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
- warnings.warn("Fall back to non-hive support because failing to
access HiveConf, "
- "please make sure you build spark with hive")
- spark = SparkSession.builder.getOrCreate()
-except TypeError:
- if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
- warnings.warn("Fall back to non-hive support because failing to
access HiveConf, "
- "please make sure you build spark with hive")
- spark = SparkSession.builder.getOrCreate()
+except Exception as e:
+ print("Failed to initialize Spark session:", e, file=sys.stderr)
--- End diff --
For consistency, it sounds better to print out the traceback here too,
like on the Scala side?
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]