Not a member of Pastebin yet? Sign up — it unlocks many cool features!
// sbt dependencies for a Spark 2.0 job writing to Elasticsearch.
//
// NOTE(fix): the original pulled in "elasticsearch-spark" 2.3.3, which is
// compiled against Spark 1.x. Running it on Spark 2.x fails at write time with
//   java.lang.NoSuchMethodError: org.apache.spark.TaskContext.addOnCompleteCallback
// (that API was removed in Spark 2.0). The Spark-2.0-specific connector
// artifact "elasticsearch-spark-20" (es-hadoop 5.x line) must be used instead.
lazy val providedDependencies = Seq(
  "org.apache.spark"  %% "spark-core"             % "2.0.0-preview",
  "org.apache.spark"  %% "spark-sql"              % "2.0.0-preview",
  // Spark-2.0-compatible Elasticsearch connector (replaces elasticsearch-spark 2.3.3).
  "org.elasticsearch" %% "elasticsearch-spark-20" % "5.0.0-alpha5"
)

libraryDependencies ++= providedDependencies
- import org.apache.spark.rdd.RDD
- import org.apache.spark.sql._
- import org.elasticsearch.spark._
- case class Foo(Id : Int, Material_Id : Int, Quantity : Double)
object TestBatch {

  /** Entry point: writes a single sample [[Foo]] document to the
    * Elasticsearch resource "spark/foo" on localhost:9200, then shuts down.
    *
    * Fixes vs. the original:
    *  - "es.mapping.id" now says "Id", matching the actual case-class field
    *    name; the lookup is case-sensitive, so "id" never resolved to a field.
    *  - The RDD is built directly with parallelize instead of round-tripping
    *    through a DataFrame and back via .rdd (which yields RDD[Row] rather
    *    than the RDD[Foo] the connector serializes by field name).
    *  - spark.stop() replaces System.exit(0) so the session shuts down
    *    cleanly instead of killing the JVM mid-flight.
    *  - Removed the unused `import spark.implicits._`.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.master("local[4]").appName("TestJob-Batch").getOrCreate()

    // One sample document; parallelize keeps the element type as Foo.
    val fooRDD = spark.sparkContext.parallelize(Seq(Foo(1, 2, 3.0)))

    val esConfig = Map(
      "es.nodes"            -> "localhost",
      "es.port"             -> "9200",
      "es.index.auto.create" -> "yes",
      // Must match the Foo field name exactly — the mapping is case-sensitive.
      "es.mapping.id"       -> "Id"
    )

    fooRDD.saveToEs(resource = "spark/foo", cfg = esConfig)

    spark.stop()
  }
}
- java.lang.NoSuchMethodError: org.apache.spark.TaskContext.addOnCompleteCallback(Lscala/Function0;)V
- at org.elasticsearch.spark.rdd.EsRDDWriter.write(EsRDDWriter.scala:42)
- at org.elasticsearch.spark.rdd.EsSpark$$anonfun$doSaveToEs$1.apply(EsSpark.scala:84)
- at org.elasticsearch.spark.rdd.EsSpark$$anonfun$doSaveToEs$1.apply(EsSpark.scala:84)
- at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
- at org.apache.spark.scheduler.Task.run(Task.scala:85)
- at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- at java.lang.Thread.run(Thread.java:745)
Add Comment
Please, Sign In to add comment