from pyspark import SparkContext, SparkConf
import random
 
conf = SparkConf() \
    .setMaster("spark://192.168.0.39:7077") \
    .setAppName("Job Name: Calculate Pi")
sc = SparkContext(conf=conf)
 
# Estimate π with a Monte Carlo method: sample points uniformly in the
# unit square; the fraction landing inside the quarter circle
# x*x + y*y < 1 converges to π/4.
def inside(_):
    x, y = random.random(), random.random()
    return x * x + y * y < 1
 
num_samples = 10**9
print("num of samples = ", num_samples)
 
count = sc.parallelize(range(0, num_samples)) \
    .filter(inside) \
    .count()
 
# the fraction of hits estimates π/4, so scale by 4
pi = 4 * count / num_samples
 
print("pi = {:0.3f}".format(pi))
sc.stop()
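 
A side note on the run log below: the 10**10 attempt sits at "(0 + 2) / 2", i.e. the whole job ran as only two partitions. parallelize takes an explicit numSlices argument, so a variant like this sketch spreads the sampling across more tasks (numSlices=100 is an assumed value, not from the original run; aim for a few partitions per core in the cluster):

# Sketch: same count, but with an explicit partition count.
# numSlices=100 is an assumption here; tune it to the cluster size.
count = sc.parallelize(range(0, num_samples), numSlices=100) \
    .filter(inside) \
    .count()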
 
 
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$  cd /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi ; env /home/zahid/anaconda3/bin/python /home/zahid/.vscode/extensions/ms-python.python-2020.5.78807/pythonFiles/lib/python/debugpy/wheels/debugpy/launcher 44527 -- /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi/.vscode/CalculatePi.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
num of samples =  10000000
 pi = 3.260                                                                     
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$  cd /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi ; env /home/zahid/anaconda3/bin/python /home/zahid/.vscode/extensions/ms-python.python-2020.5.78807/pythonFiles/lib/python/debugpy/wheels/debugpy/launcher 45337 -- /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi/.vscode/CalculatePi.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
num of samples =  100000000
 pi = 3.261                                                                     
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$  cd /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi ; env /home/zahid/anaconda3/bin/python /home/zahid/.vscode/extensions/ms-python.python-2020.5.78807/pythonFiles/lib/python/debugpy/wheels/debugpy/launcher 40507 -- /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi/.vscode/CalculatePi.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
num of samples =  100000000
 pi = 3.253                                                                     
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$  cd /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi ; env /home/zahid/anaconda3/bin/python /home/zahid/.vscode/extensions/ms-python.python-2020.5.78807/pythonFiles/lib/python/debugpy/wheels/debugpy/launcher 42593 -- /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi/.vscode/CalculatePi.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
num of samples =  10000000000
[Stage 0:>                                                          (0 + 2) / 2]
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$  cd /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi ; env /home/zahid/anaconda3/bin/python /home/zahid/.vscode/extensions/ms-python.python-2020.5.78807/pythonFiles/lib/python/debugpy/wheels/debugpy/launcher 40545 -- /home/zahid/spark-2.4.5-bin-hadoop2.7/sparkpi/.vscode/CalculatePi.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
num of samples =  1000000000
pi = 3.142                                                                      
(base) zahid@kub20:~/spark-2.4.5-bin-hadoop2.7/sparkpi$
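 
For reference, the statistical error of this estimator shrinks like 1/sqrt(n): the hit count is Binomial(n, π/4), so the standard error of the estimate is 4*sqrt((π/4)*(1 - π/4)/n). A quick back-of-envelope check of the sample sizes above:

import math

# expected one-sigma error of pi_hat = 4 * count / n
for n in (10**7, 10**8, 10**9):
    se = 4 * math.sqrt((math.pi / 4) * (1 - math.pi / 4) / n)
    print("n = {:.0e}  std. error ~ {:.5f}".format(n, se))

At 10**9 samples that is about 5e-5, consistent with the 3.142 result from the last run.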
 
 
www.backbutton.co.uk
aka  backbutt