// Imports
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SaveMode
import scala.concurrent.ExecutionContext.Implicits.global
import java.util.Properties
import scala.concurrent.Future
// Set up Spark locally with 2 threads
val conf = new SparkConf().setMaster("local[2]").setAppName("app")
val sc = new SparkContext(conf)
val sqlCtx = new HiveContext(sc)
// Create a fake DataFrame
import sqlCtx.implicits._
var df = sc.parallelize(1 to 50000).map { i => (i, i, i, i, i, i, i) }.toDF("a", "b", "c", "d", "e", "f", "g").repartition(2)
// Write it as a parquet file, then read it back
df.write.parquet("/tmp/parquet1")
df = sqlCtx.read.parquet("/tmp/parquet1")
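// Not in the original paste: a quick sanity check that the parquet round
// trip preserved all 50,000 rows.
assert(df.count() == 50000)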
// JDBC connection
val url = "jdbc:postgresql://localhost:5432/tempdb"
val prop = new Properties()
prop.setProperty("user", "admin")
prop.setProperty("password", "")
// Four concurrent JDBC writes - at least one of them has been consistently failing
val x1 = Future { df.write.jdbc(url, "temp1", prop) }
val x2 = Future { df.write.jdbc(url, "temp2", prop) }
val x3 = Future { df.write.jdbc(url, "temp3", prop) }
val x4 = Future { df.write.jdbc(url, "temp4", prop) }
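// A minimal sketch, not in the original paste: block on the combined future
// so that whichever write fails surfaces its exception on the driver.
import scala.concurrent.Await
import scala.concurrent.duration._
Await.result(Future.sequence(Seq(x1, x2, x3, x4)), 10.minutes)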
// Partial stack trace from the failing write (the exception header was cut off in the paste):
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87) ~[org.apache.spark.spark-sql_2.11-1.6.0.jar:1.6.0]
at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:2125) ~[org.apache.spark.spark-sql_2.11-1.6.0.jar:1.6.0]
at org.apache.spark.sql.DataFrame.foreachPartition(DataFrame.scala:1482) ~[org.apache.spark.spark-sql_2.11-1.6.0.jar:1.6.0]
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.saveTable(JdbcUtils.scala:247) ~[org.apache.spark.spark-sql_2.11-1.6.0.jar:1.6.0]
at org.apache.spark.sql.DataFrameWriter.jdbc(DataFrameWriter.scala:306) ~[org.apache.spark.spark-sql_2.11-1.6.0.jar:1.6.0]
at writer.SQLWriter$.writeDf(Writer.scala:75) ~[temple.temple-1.0-sans-externalized.jar:na]
at writer.Writer$.writeDf(Writer.scala:33) ~[temple.temple-1.0-sans-externalized.jar:na]
at controllers.Api$$anonfun$downloadTable$1$$anonfun$apply$25.apply(Api.scala:460) ~[temple.temple-1.0-sans-externalized.jar:2.4.6]
at controllers.Api$$anonfun$downloadTable$1$$anonfun$apply$25.apply(Api.scala:452) ~[temple.temple-1.0-sans-externalized.jar:2.4.6]
at scala.util.Success$$anonfun$map$1.apply(Try.scala:237) ~[org.scala-lang.scala-library-2.11.7.jar:na]
// Workaround: run the writes on a dedicated fixed-size thread pool instead of
// the global ExecutionContext
import java.util.concurrent.Executors
import scala.concurrent.ExecutionContext
val executorService = Executors.newFixedThreadPool(4)
implicit val ec = ExecutionContext.fromExecutorService(executorService)
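// Sketch, not in the original paste: retry the writes on the dedicated pool
// (SaveMode.Overwrite in case the failed run left the tables behind), then
// shut the pool down once everything completes. The new implicit ec, being
// more specific, wins over the global ExecutionContext imported above.
val retried = Future.sequence(Seq("temp1", "temp2", "temp3", "temp4").map { t =>
  Future { df.write.mode(SaveMode.Overwrite).jdbc(url, t, prop) }
})
retried.onComplete(_ => executorService.shutdown())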