Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import org.apache.log4j.{Level, Logger}
- import org.apache.spark.sql.SparkSession
class SparkTest {

  /** Demo: reads two PostgreSQL tables over JDBC, shows a sample of each,
    * and prints their inner join on the shared `id` column.
    *
    * Connection details (URL, user, password) are hard-coded for a local
    * `testdb` demo database; adjust them for a real deployment.
    */
  def JdbcExample(): Unit = {
    // Silence Spark's and Akka's verbose INFO logging so only table output shows.
    Logger.getLogger("org").setLevel(Level.ERROR)
    Logger.getLogger("akka").setLevel(Level.ERROR)

    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("Spark Postgres")
      .getOrCreate()

    try {
      // Local helper: all JDBC reads share the same connection options,
      // differing only in the table name.
      def readTable(table: String) =
        spark.read
          .format("jdbc")
          .option("url", "jdbc:postgresql://localhost/testdb")
          .option("dbtable", table)
          .option("user", "scala")
          .option("password", "scala")
          .load()

      val jdbcDF = readTable("public.employee")
      jdbcDF.show(5)

      val df = readTable("public.details")
      df.show(5)

      // Inner join on the `id` column present in both tables.
      val inner_df = jdbcDF.join(df, jdbcDF("id") === df("id"))
      inner_df.show()
    } finally {
      // FIX: the original never stopped the session, leaking the local
      // SparkContext (threads, UI port) until JVM exit.
      spark.stop()
    }
  }
}
object test {
  /** Application entry point: runs the JDBC demo. */
  // FIX: procedure syntax (`def main(...) { ... }` without `: Unit =`) is
  // deprecated since Scala 2.13 and removed in Scala 3.
  def main(args: Array[String]): Unit = {
    val DBobj = new SparkTest
    DBobj.JdbcExample()
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement