User class threw exception: org.apache.spark.SparkException: Job aborted.
    at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:96)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1493)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1472)
    at org.apache.spark.api.java.JavaRDDLike$class.saveAsTextFile(JavaRDDLike.scala:550)
    at org.apache.spark.api.java.AbstractJavaRDDLike.saveAsTextFile(JavaRDDLike.scala:45)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93)
    at clojure.lang.Reflector.invokeInstanceMethod(Reflector.java:28)
    at flambo.api$save_as_text_file.invokeStatic(api.clj:434)
    at flambo.api$save_as_text_file.invoke(api.clj:428)
    at place_attach_cli.place_attach_spark$save_txt.invokeStatic(place_attach_spark.clj:141)
    at place_attach_cli.place_attach_spark$save_txt.invoke(place_attach_spark.clj:138)
    at place_attach_cli.place_attach_spark$handle_queries.invokeStatic(place_attach_spark.clj:152)
    at place_attach_cli.place_attach_spark$handle_queries.invoke(place_attach_spark.clj:143)
    at place_attach_cli.spark_cli$_main.invokeStatic(spark_cli.clj:73)
    at place_attach_cli.spark_cli$_main.doInvoke(spark_cli.clj:67)
    at clojure.lang.RestFn.applyTo(RestFn.java:137)
    at place_attach_cli.spark_cli.main(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 4 times, most recent failure: Lost task 0.3 in stage 4.0 (TID 409, d572.la.prod.factual.com, executor 20): org.apache.spark.SparkException: Task failed while writing rows
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:151)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NoSuchMethodError: org.apache.thrift.TBaseHelper.hashCode(J)I
    at com.factual.tile.api.v3.IDWrapper.hashCode(IDWrapper.java:268)
    at java.util.HashMap.hash(HashMap.java:339)
    at java.util.HashMap.put(HashMap.java:612)
    at java.util.HashSet.add(HashSet.java:220)
    at com.factual.tile.api.v3.FactualPlaceData$FactualPlaceDataStandardScheme.read(FactualPlaceData.java:1928)
    at com.factual.tile.api.v3.FactualPlaceData$FactualPlaceDataStandardScheme.read(FactualPlaceData.java:1751)
    at com.factual.tile.api.v3.FactualPlaceData.read(FactualPlaceData.java:1541)
    at com.factual.tile.api.v3.Feature$FeatureStandardScheme.read(Feature.java:724)
    at com.factual.tile.api.v3.Feature$FeatureStandardScheme.read(Feature.java:709)
    at com.factual.tile.api.v3.Feature.read(Feature.java:589)
    at com.factual.tile.api.v3.Tile$TileStandardScheme.read(Tile.java:424)
    at com.factual.tile.api.v3.Tile$TileStandardScheme.read(Tile.java:395)
    at com.factual.tile.api.v3.Tile.read(Tile.java:332)
    at org.apache.thrift.TDeserializer.deserialize(TDeserializer.java:81)
    at org.apache.thrift.TDeserializer.deserialize(TDeserializer.java:67)
    at com.factual.tile.api.Utils.decodeThrift(Utils.java:42)
    at com.factual.tile.adaptor.v3.TileAdaptor.fromThriftBytes(TileAdaptor.java:44)
    at com.factual.tile.adaptor.v3.TileAdaptor.fromTileBuffer(TileAdaptor.java:55)
    at com.factual.tile.adaptor.TileAdaptor.fromTileBuffer(TileAdaptor.java:28)
    at com.factual.tile.reader.TileSet.lambda$new$0(TileSet.java:19)
    at com.google.common.collect.Iterators$8.transform(Iterators.java:860)
    at com.google.common.collect.TransformedIterator.next(TransformedIterator.java:48)
    at com.factual.placeattach.PlaceAttachmentHDFS.preloadTiles(PlaceAttachmentHDFS.java:92)
    at com.factual.placeattach.PlaceAttachmentHDFS.ensureTiles(PlaceAttachmentHDFS.java:71)
    at com.factual.placeattach.PlaceAttachment.placeAttach(PlaceAttachment.java:153)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93)
    at clojure.lang.Reflector.invokeInstanceMethod(Reflector.java:28)
    at place_attach_cli.place_attach_spark$perform_pa_single.invokeStatic(place_attach_spark.clj:45)
    at place_attach_cli.place_attach_spark$perform_pa_single.invoke(place_attach_spark.clj:39)
    at clojure.core$partial$fn__4763.invoke(core.clj:2529)
    at place_attach_cli.place_attach_spark$eval1867$place_attach_cli_DOT_place_attach_spark_sfn____1868.invoke(NO_SOURCE_FILE:68)
    at clojure.lang.AFn.applyToHelper(AFn.java:154)
    at clojure.lang.AFn.applyTo(AFn.java:144)
    at clojure.lang.AFunction$1.doInvoke(AFunction.java:29)
    at clojure.lang.RestFn.applyTo(RestFn.java:137)
    at clojure.core$apply.invokeStatic(core.clj:646)
    at clojure.core$apply.invoke(core.clj:641)
    at flambo.function$Function_call.invokeStatic(function.clj:57)
    at flambo.function$Function_call.doInvoke(function.clj:57)
    at clojure.lang.RestFn.invoke(RestFn.java:423)
    at flambo.function.Function.call(Unknown Source)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:125)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:123)
    at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:135)
    ... 8 more
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1602)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1590)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1589)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1589)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1823)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1772)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1761)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2055)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2087)
    at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
    ... 51 more
Caused by: org.apache.spark.SparkException: Task failed while writing rows
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:151)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NoSuchMethodError: org.apache.thrift.TBaseHelper.hashCode(J)I
    at com.factual.tile.api.v3.IDWrapper.hashCode(IDWrapper.java:268)
    at java.util.HashMap.hash(HashMap.java:339)
    at java.util.HashMap.put(HashMap.java:612)
    at java.util.HashSet.add(HashSet.java:220)
    at com.factual.tile.api.v3.FactualPlaceData$FactualPlaceDataStandardScheme.read(FactualPlaceData.java:1928)
    at com.factual.tile.api.v3.FactualPlaceData$FactualPlaceDataStandardScheme.read(FactualPlaceData.java:1751)
    at com.factual.tile.api.v3.FactualPlaceData.read(FactualPlaceData.java:1541)
    at com.factual.tile.api.v3.Feature$FeatureStandardScheme.read(Feature.java:724)
    at com.factual.tile.api.v3.Feature$FeatureStandardScheme.read(Feature.java:709)
    at com.factual.tile.api.v3.Feature.read(Feature.java:589)
    at com.factual.tile.api.v3.Tile$TileStandardScheme.read(Tile.java:424)
    at com.factual.tile.api.v3.Tile$TileStandardScheme.read(Tile.java:395)
    at com.factual.tile.api.v3.Tile.read(Tile.java:332)
    at org.apache.thrift.TDeserializer.deserialize(TDeserializer.java:81)
    at org.apache.thrift.TDeserializer.deserialize(TDeserializer.java:67)
    at com.factual.tile.api.Utils.decodeThrift(Utils.java:42)
    at com.factual.tile.adaptor.v3.TileAdaptor.fromThriftBytes(TileAdaptor.java:44)
    at com.factual.tile.adaptor.v3.TileAdaptor.fromTileBuffer(TileAdaptor.java:55)
    at com.factual.tile.adaptor.TileAdaptor.fromTileBuffer(TileAdaptor.java:28)
    at com.factual.tile.reader.TileSet.lambda$new$0(TileSet.java:19)
    at com.google.common.collect.Iterators$8.transform(Iterators.java:860)
    at com.google.common.collect.TransformedIterator.next(TransformedIterator.java:48)
    at com.factual.placeattach.PlaceAttachmentHDFS.preloadTiles(PlaceAttachmentHDFS.java:92)
    at com.factual.placeattach.PlaceAttachmentHDFS.ensureTiles(PlaceAttachmentHDFS.java:71)
    at com.factual.placeattach.PlaceAttachment.placeAttach(PlaceAttachment.java:153)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93)
    at clojure.lang.Reflector.invokeInstanceMethod(Reflector.java:28)
    at place_attach_cli.place_attach_spark$perform_pa_single.invokeStatic(place_attach_spark.clj:45)
    at place_attach_cli.place_attach_spark$perform_pa_single.invoke(place_attach_spark.clj:39)
    at clojure.core$partial$fn__4763.invoke(core.clj:2529)
    at place_attach_cli.place_attach_spark$eval1867$place_attach_cli_DOT_place_attach_spark_sfn____1868.invoke(NO_SOURCE_FILE:68)
    at clojure.lang.AFn.applyToHelper(AFn.java:154)
    at clojure.lang.AFn.applyTo(AFn.java:144)
    at clojure.lang.AFunction$1.doInvoke(AFunction.java:29)
    at clojure.lang.RestFn.applyTo(RestFn.java:137)
    at clojure.core$apply.invokeStatic(core.clj:646)
    at clojure.core$apply.invoke(core.clj:641)
    at flambo.function$Function_call.invokeStatic(function.clj:57)
    at flambo.function$Function_call.doInvoke(function.clj:57)
    at clojure.lang.RestFn.invoke(RestFn.java:423)
    at flambo.function.Function.call(Unknown Source)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:125)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:123)
    at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:135)
    ... 8 more
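
Note on the root cause: the failing frame is java.lang.NoSuchMethodError on org.apache.thrift.TBaseHelper.hashCode(J)I, thrown from Thrift-generated code (IDWrapper.hashCode). This pattern usually means the generated classes were compiled against one libthrift version while the executor classpath resolves TBaseHelper from a different (typically older, cluster-provided) libthrift that lacks the long-argument hashCode helper. The sketch below is a hypothetical diagnostic, not part of the original job: it logs which jar actually supplies TBaseHelper, and should be run inside a task (e.g. near perform-pa-single) so it reflects the executor JVM rather than the driver.

    ;; Hypothetical diagnostic sketch: report the jar that supplies TBaseHelper
    ;; on the current JVM's classpath, to confirm a libthrift version clash.
    (defn thrift-jar-location []
      (-> (Class/forName "org.apache.thrift.TBaseHelper")
          .getProtectionDomain   ; protection domain carries the code source
          .getCodeSource         ; code source knows the originating jar
          .getLocation           ; URL of that jar
          str))

If the reported location turns out to be a cluster-provided libthrift, common remedies are to align the application's libthrift dependency with the version the cluster ships, or to make Spark prefer the application's bundled jars via the spark.driver.userClassPathFirst=true and spark.executor.userClassPathFirst=true configuration keys; which of these applies here depends on how the job is packaged and submitted, which the paste does not show.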