>> from pyspark import SparkContext, SparkConf
>> from pyspark.sql import SparkSession
>>
>> conf = SparkConf()\
>>     .setMaster("local[8]")\
>>     .setAppName("Test")
>>
>> sc = SparkContext(conf=conf)
>>
>> spark = SparkSession.builder\
>>     .enableHiveSupport()\
>>     .getOrCreate()
>
> Mind you, this is for Spark 2.0 and above.
>
>
>
> --
> View this message in context: http://apache-spark-user-list.
> 1001560.n3.nabble.com/Spark-Python-in-Jupyter-Notebook-tp28268p28274.html
> Sent from the Apache Spark User List mailing list archive at Nabble.com.
>
> -
> To unsubscribe e-mail: user-unsubscr...@spark.apache.org
>
>
Spark 2.0 and above
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/Spark-Python-in-Jupyter-Notebook-tp28268p28274.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.
-
To unsubscribe e-mail: user-unsubscr...@spark.apache.org