org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1708)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1696)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1695)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1695)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:855)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:855)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:855)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1923)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1878)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1867)
  at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:671)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2050)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2069)
  at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:336)
  at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:38)
  at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:2861)
  at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2150)
  at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2150)
  at org.apache.spark.sql.Dataset$$anonfun$55.apply(Dataset.scala:2842)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:2841)
  at org.apache.spark.sql.Dataset.head(Dataset.scala:2150)
  at org.apache.spark.sql.Dataset.take(Dataset.scala:2363)
  ... 48 elided
Caused by: java.lang.IllegalArgumentException: Illegal pattern component: XXX
  at org.apache.commons.lang3.time.FastDatePrinter.parsePattern(FastDatePrinter.java:282)
  at org.apache.commons.lang3.time.FastDatePrinter.init(FastDatePrinter.java:149)
  at org.apache.commons.lang3.time.FastDatePrinter.<init>(FastDatePrinter.java:142)
  at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:384)
  at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:369)
  at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:91)
  at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:88)
  at org.apache.commons.lang3.time.FormatCache.getInstance(FormatCache.java:82)
  at org.apache.commons.lang3.time.FastDateFormat.getInstance(FastDateFormat.java:165)
  at org.apache.spark.sql.catalyst.json.JSONOptions.<init>(JSONOptions.scala:81)
  at org.apache.spark.sql.catalyst.json.JSONOptions.<init>(JSONOptions.scala:43)
  at org.apache.spark.sql.Dataset$$anonfun$51.apply(Dataset.scala:2747)
  at org.apache.spark.sql.Dataset$$anonfun$51.apply(Dataset.scala:2743)
  at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:797)
  at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:797)
  at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
  at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
  at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
  at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
  at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
  at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
  at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
  at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
  at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
  at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
  at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
  at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
  at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
  at org.apache.spark.scheduler.Task.run(Task.scala:108)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:748)
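
Note on the root cause: "Illegal pattern component: XXX" thrown from commons-lang3's FastDatePrinter while Spark's JSONOptions is being constructed is commonly reported when an older commons-lang3 jar (earlier than 3.5, often pulled in by Hadoop or Hive) shadows the version Spark ships, so the "XXX" (ISO 8601 time zone) component of Spark's default JSON timestampFormat cannot be parsed. The sketch below is a commonly reported workaround, not a confirmed fix for this particular job: it assumes the JSON is produced or consumed through DataFrameReader/DataFrameWriter, and the class name, file paths, and chosen pattern are illustrative only.

// JsonTimestampWorkaround.scala -- hedged sketch, not the original job.
// Assumes the classpath carries a pre-3.5 commons-lang3 that rejects "XXX";
// we sidestep it by supplying a timestampFormat without that component.
import org.apache.spark.sql.SparkSession

object JsonTimestampWorkaround {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("json-timestamp-workaround")   // hypothetical app name
      .getOrCreate()

    // Override the default "yyyy-MM-dd'T'HH:mm:ss.SSSXXX" pattern with one
    // that avoids "XXX"; "ZZ" prints/parses the offset as +HH:MM and is
    // understood by older FastDateFormat releases.
    val df = spark.read
      .option("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSZZ")
      .json("/path/to/input.json")             // hypothetical path

    df.write
      .option("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSZZ")
      .json("/path/to/output")                 // hypothetical path

    spark.stop()
  }
}

If the failing call is Dataset.toJSON (which the Dataset.scala frames suggest and which accepts no options), the option-based workaround does not apply; in that case the usual remedy is to make sure commons-lang3 3.5 or newer wins on the classpath, for example by shading the dependency or setting spark.driver.userClassPathFirst / spark.executor.userClassPathFirst.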