Not a member of Pastebin yet? Sign up — it unlocks many cool features!
import org.apache.spark.sql._
import com.databricks._

// Smoke-test script: reads one column from a Redshift table through the
// spark-redshift connector (staging via S3), registers it as a temp view,
// and prints a row count.
val spark = SparkSession.builder.appName("test_job").getOrCreate()

spark.read
  .format("com.databricks.spark.redshift")
  // {servername} / {portnumber} are placeholders — substitute real values before running.
  .option("url", "jdbc:postgresql://{servername}.redshift.amazonaws.com:{portnumber}/prod")
  // SECURITY: credentials are hard-coded placeholders; load them from a secrets
  // manager or Spark conf instead of committing them in source.
  .option("user", "username")
  .option("password", "password")
  .option("query", "select train_nbr from datamart.train_ticket")
  .option("forward_spark_s3_credentials", "true")
  // Fixed: original had an unbalanced brace ("{path}}"). NOTE(review): the s3n://
  // scheme is deprecated — prefer s3a:// on modern Hadoop; confirm cluster support.
  .option("tempdir", "s3n://{path}/tempfile")
  .load()
  .createOrReplaceTempView("temp")

// count(1) over the staged view — allrecords is a single-row DataFrame.
val allrecords = spark.sql("select count(1) from temp")
allrecords.show()
Add Comment
Please sign in to add a comment.