/opt/spark/bin/spark-submit --class com.myntelligence.text.tranformation.ScrapperService --master "local[*]" --driver-memory ${SPARK_DRIVER_MEMORY} --driver-java-options="-Dapplication.spark.master=spark://${HOSTNAME}:7077 -Dapplication.amqp.host=${RABBITMQ_HOST} -Dapplication.amqp.port=${RABBITMQ_PORT} -Dapplication.amqp.username=${RABBITMQ_USER} -Dapplication.amqp.password=${RABBITMQ_PASSWORD}" /spark-jobs/myntelligence-text-transformation.jar
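
Note: the AMQP properties above are passed only through --driver-java-options, so they are set in the driver JVM but not in the executor JVMs; the failure below occurs on a remote executor (executor 0, 172.19.0.8). A minimal sketch of one way to also forward the same properties to executors, via Spark's standard spark.executor.extraJavaOptions setting (whether the job actually reads these system properties on the executor side is an assumption; the property names simply mirror the command above):

/opt/spark/bin/spark-submit \
  --class com.myntelligence.text.tranformation.ScrapperService \
  --master "local[*]" \
  --driver-memory ${SPARK_DRIVER_MEMORY} \
  --driver-java-options="-Dapplication.spark.master=spark://${HOSTNAME}:7077 -Dapplication.amqp.host=${RABBITMQ_HOST} -Dapplication.amqp.port=${RABBITMQ_PORT} -Dapplication.amqp.username=${RABBITMQ_USER} -Dapplication.amqp.password=${RABBITMQ_PASSWORD}" \
  --conf "spark.executor.extraJavaOptions=-Dapplication.amqp.host=${RABBITMQ_HOST} -Dapplication.amqp.port=${RABBITMQ_PORT} -Dapplication.amqp.username=${RABBITMQ_USER} -Dapplication.amqp.password=${RABBITMQ_PASSWORD}" \
  /spark-jobs/myntelligence-text-transformation.jar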
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 4 times, most recent failure: Lost task 0.3 in stage 4.0 (TID 75, 172.19.0.8, executor 0): com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'host'
    at com.typesafe.config.impl.SimpleConfig.findKeyOrNull(SimpleConfig.java:152)
    at com.typesafe.config.impl.SimpleConfig.findOrNull(SimpleConfig.java:170)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:184)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:189)
    at com.typesafe.config.impl.SimpleConfig.getString(SimpleConfig.java:246)
    at io.scalac.amqp.ConnectionSettings$$anonfun$apply$1.apply(ConnectionSettings.scala:49)
    at io.scalac.amqp.ConnectionSettings$$anonfun$apply$1.apply(ConnectionSettings.scala:47)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.Iterator$class.foreach(Iterator.scala:893)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at io.scalac.amqp.ConnectionSettings$.apply(ConnectionSettings.scala:51)
    at io.scalac.amqp.Connection$.apply(Connection.scala:15)
    at com.myntelligence.text.tranformation.ScrapperService$.handler(ScrapperService.scala:94)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1$$anonfun$apply$1.apply(ScrapperService.scala:82)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1$$anonfun$apply$1.apply(ScrapperService.scala:82)
    at scala.collection.Iterator$class.foreach(Iterator.scala:893)
    at org.apache.spark.util.CompletionIterator.foreach(CompletionIterator.scala:26)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:918)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:918)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1951)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1951)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1925)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1938)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1951)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1965)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:918)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:916)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.foreach(RDD.scala:916)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1.apply(ScrapperService.scala:82)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1.apply(ScrapperService.scala:80)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:627)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:627)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:51)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:50)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
    at scala.util.Try$.apply(Try.scala:192)
    at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:256)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:256)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:256)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:255)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'host'
    at com.typesafe.config.impl.SimpleConfig.findKeyOrNull(SimpleConfig.java:152)
    at com.typesafe.config.impl.SimpleConfig.findOrNull(SimpleConfig.java:170)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:184)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:189)
    at com.typesafe.config.impl.SimpleConfig.getString(SimpleConfig.java:246)
    at io.scalac.amqp.ConnectionSettings$$anonfun$apply$1.apply(ConnectionSettings.scala:49)
    at io.scalac.amqp.ConnectionSettings$$anonfun$apply$1.apply(ConnectionSettings.scala:47)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.Iterator$class.foreach(Iterator.scala:893)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at io.scalac.amqp.ConnectionSettings$.apply(ConnectionSettings.scala:51)
    at io.scalac.amqp.Connection$.apply(Connection.scala:15)
    at com.myntelligence.text.tranformation.ScrapperService$.handler(ScrapperService.scala:94)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1$$anonfun$apply$1.apply(ScrapperService.scala:82)
    at com.myntelligence.text.tranformation.ScrapperService$$anonfun$main$1$$anonfun$apply$1.apply(ScrapperService.scala:82)
    at scala.collection.Iterator$class.foreach(Iterator.scala:893)
    at org.apache.spark.util.CompletionIterator.foreach(CompletionIterator.scala:26)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:918)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:918)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1951)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1951)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
    ... 3 more
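
The root cause in both traces is a Typesafe Config lookup inside io.scalac.amqp.ConnectionSettings.apply (the reactive-rabbit library): it maps over the configured broker addresses and calls getString("host") on each entry, and that key is missing from the config the executor loads. A minimal Scala sketch of the lookup that fails, assuming the key layout of reactive-rabbit's reference.conf (amqp.addresses entries with host/port; all values below are illustrative placeholders):

import scala.collection.JavaConverters._
import com.typesafe.config.ConfigFactory

object AmqpConfigCheck extends App {
  // Assumed layout mirroring reactive-rabbit's reference.conf; values are placeholders.
  val config = ConfigFactory.parseString(
    """amqp {
      |  addresses = [ { host = "rabbitmq", port = 5672 } ]
      |  username = "guest"
      |  password = "guest"
      |}""".stripMargin)

  // The lookup below is what throws ConfigException$Missing in the trace above
  // when an address entry (or the whole amqp.addresses path) lacks a 'host' key.
  config.getConfig("amqp").getConfigList("addresses").asScala.foreach { addr =>
    println(s"${addr.getString("host")}:${addr.getInt("port")}")
  }
}

If that is the layout the job expects, the -Dapplication.amqp.host=... system properties would not populate amqp.addresses by themselves (unless the application remaps them); shipping an application.conf inside the jar, or pointing every JVM at one via Typesafe Config's standard -Dconfig.file property, avoids depending on driver-only system properties altogether.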