Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
"""Estimate pi with a Monte Carlo simulation distributed via PySpark.

Draws NUM_SAMPLES random points in the unit square and counts how many
fall inside the quarter unit circle; 4 * (hits / samples) approximates pi.
"""
from pyspark import SparkContext
import random

# Number of random points to sample; more samples -> tighter estimate.
NUM_SAMPLES = 1000


def inside(_):
    """Return True if a fresh random point in the unit square lies inside
    the quarter circle of radius 1.

    The argument (the RDD element) is ignored; it only drives how many
    times the predicate is evaluated.
    """
    x, y = random.random(), random.random()
    return x * x + y * y < 1


if __name__ == "__main__":
    # The original snippet assumed a pre-existing `sc` (e.g. a pyspark
    # shell); create one explicitly so the script runs standalone.
    sc = SparkContext(appName="PiEstimate")
    try:
        # xrange/print-statement were Python 2 only; use range and print().
        count = sc.parallelize(range(0, NUM_SAMPLES)).filter(inside).count()
        print("Pi is roughly %f" % (4.0 * count / NUM_SAMPLES))
    finally:
        # Always release cluster resources, even if the job fails.
        sc.stop()
Add Comment
Please sign in to add a comment