Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
// Reads a Redshift table into a DataFrame via the spark-redshift connector,
// staging the unloaded data through an S3 temp directory.
//
// NOTE(review): the attached stack trace (NoClassDefFoundError:
// com.amazonaws.services.s3.model.S3ObjectInputStream) means the AWS Java SDK
// jar (aws-java-sdk / aws-java-sdk-s3) is missing from the runtime classpath.
// That is a build/dependency problem — add the SDK to the build; no code
// change below can resolve a missing class.
val sparkSession = SparkSession
  .builder
  .master("local")
  .appName("CollabrativeFilter")
  .config("spark.sql.warehouse.dir", "file:///c:/Temp/spark-warehouse")
  .config("spark.sql.crossJoin.enabled", true)
  .getOrCreate()

// WARNING(review): hard-coded secrets — load these from the environment or a
// secret store instead of committing them in source.
val awsAccessKeyId = "value"
val awsSecretAccessKey = "value"
val redshiftDBName = "value"
val redshiftUserId = "value"
val redshiftPassword = "value"
val redshifturl = "value"

// Fix: the original declared the AWS keys but never used them. Register them
// with the Hadoop configuration so the s3n filesystem (used for the temp
// staging dir) can authenticate, and so forward_spark_s3_credentials below
// has credentials to forward to Redshift's UNLOAD/COPY.
val hadoopConf = sparkSession.sparkContext.hadoopConfiguration
hadoopConf.set("fs.s3n.awsAccessKeyId", awsAccessKeyId)
hadoopConf.set("fs.s3n.awsSecretAccessKey", awsSecretAccessKey)

val jdbcURL = s"jdbc:redshift://$redshifturl/$redshiftDBName?user=$redshiftUserId&password=$redshiftPassword"

// Fix: "s3n:accessid:secretkey@bucket/" is a malformed URI — the s3n scheme
// requires "//" after the colon. Credentials are also dropped from the URI:
// forward_spark_s3_credentials=true is documented as incompatible with
// credentials embedded in tempdir, and they now come from hadoopConf above.
val tempS3Dir = "s3n://bucket/"

val eventsDF = sparkSession.read
  .format("com.databricks.spark.redshift")
  .option("url", jdbcURL)
  .option("tempdir", tempS3Dir)
  .option("dbtable", "table_name")
  // Forward the fs.s3n.* credentials set above to Redshift for UNLOAD.
  .option("forward_spark_s3_credentials", "true")
  .load()

eventsDF.show()
- Exception in thread "main" java.lang.NoClassDefFoundError: com/amazonaws/services/s3/model/S3ObjectInputStream
- at com.databricks.spark.redshift.DefaultSource.createRelation(DefaultSource.scala:51)
- at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:330)
- at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:152)
- at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:125)
- at new_test$.main(new_test.scala:42)
- at new_test.main(new_test.scala)
- Caused by: java.lang.ClassNotFoundException: com.amazonaws.services.s3.model.S3ObjectInputStream
- at java.net.URLClassLoader.findClass(Unknown Source)
- at java.lang.ClassLoader.loadClass(Unknown Source)
- at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
- at java.lang.ClassLoader.loadClass(Unknown Source)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement