Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
"""Load the `accident` table from a Cassandra cluster into a Spark DataFrame.

Starts a local Spark context with the DataStax Cassandra connector on the
classpath, then reads `test_accidents.accident` into `accidentsDf`.
"""
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession

# Pull in the DataStax Cassandra connector at submit time
# (connector 2.4.0, Scala 2.11 build — must match the cluster's Spark/Scala).
conf = (
    SparkConf()
    .set('spark.jars.packages',
         'datastax:spark-cassandra-connector:2.4.0-s_2.11')
    .setAppName('SparkCass')
    .setMaster('local[1]')  # single local core; increase for real workloads
)

sc = SparkContext(conf=conf)
spark = SparkSession(sc)

# NOTE(review): the Cassandra host is hard-coded — consider moving it to
# configuration/environment before deploying beyond this experiment.
accidentsDf = (
    spark.read.format('org.apache.spark.sql.cassandra')
    .option('spark.cassandra.connection.host', '3.94.217.46')
    .options(table='accident', keyspace='test_accidents')
    .load()
)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement