Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Reads the `products` table from a local MySQL database into a Spark
// DataFrame over JDBC and prints the first rows.
//
// Requires the MySQL JDBC driver on the classpath; add to build.sbt:
//   libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.43"

// Local Spark master with 2 worker threads; app name is arbitrary.
val conf = new SparkConf().setAppName("xx").setMaster("local[2]")
val sc = new SparkContext(conf)

// NOTE(review): HiveContext is deprecated since Spark 2.0 — if this project
// is on Spark 2+, prefer SparkSession.builder().enableHiveSupport().
// A plain SQLContext would also work here; Hive support is not needed for a
// JDBC read (the original comment says as much).
val hiveContext = new HiveContext(sc)

// JDBC connection properties.
// NOTE(review): credentials are hard-coded in source — load them from
// configuration or the environment before this goes anywhere near production.
val prop = new java.util.Properties
prop.setProperty("user", "root")
prop.setProperty("password", "XXX")

// Load retail_db.products as a DataFrame; the read is lazy, `show()` triggers it.
val df_mysql = hiveContext.read.jdbc("jdbc:mysql://localhost:3306/retail_db", "products", prop)

// Side-effecting call: keep the parentheses per Scala convention.
df_mysql.show()

// Release Spark resources when the script is done.
sc.stop()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement