Py4JJavaError: An error occurred while calling o342.collectToPython.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 36 in stage 14.0 failed 1 times, most recent failure: Lost task 36.0 in stage 14.0 (TID 675, localhost, executor driver): org.apache.spark.api.python.PythonException: Traceback (most recent call last):
  File "C:\Users\MUM1342\Desktop\spark\spark-2.4.3-bin-hadoop2.7\python\lib\pyspark.zip\pyspark\worker.py", line 362, in main
  File "C:\Users\MUM1342\Desktop\spark\spark-2.4.3-bin-hadoop2.7\python\lib\pyspark.zip\pyspark\serializers.py", line 715, in read_int
    length = stream.read(4)
  File "C:\ProgramData\Anaconda2\lib\socket.py", line 384, in read
    data = self._sock.recv(left)
timeout: timed out

	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.handlePythonException(PythonRunner.scala:452)
	at org.apache.spark.sql.execution.python.PythonUDFRunner$$anon$1.read(PythonUDFRunner.scala:81)
	at org.apache.spark.sql.execution.python.PythonUDFRunner$$anon$1.read(PythonUDFRunner.scala:64)
	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:406)
	at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage7.processNext(Unknown Source)
	at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
	at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
	at java.lang.Thread.run(Unknown Source)

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1889)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1877)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1876)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:945)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:944)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:299)
	at org.apache.spark.sql.Dataset$$anonfun$collectToPython$1.apply(Dataset.scala:3257)
	at org.apache.spark.sql.Dataset$$anonfun$collectToPython$1.apply(Dataset.scala:3254)
	at org.apache.spark.sql.Dataset$$anonfun$53.apply(Dataset.scala:3364)
	at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3363)
	at org.apache.spark.sql.Dataset.collectToPython(Dataset.scala:3254)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
	at java.lang.reflect.Method.invoke(Unknown Source)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.lang.Thread.run(Unknown Source)
Caused by: org.apache.spark.api.python.PythonException: Traceback (most recent call last):
  File "C:\Users\MUM1342\Desktop\spark\spark-2.4.3-bin-hadoop2.7\python\lib\pyspark.zip\pyspark\worker.py", line 362, in main
  File "C:\Users\MUM1342\Desktop\spark\spark-2.4.3-bin-hadoop2.7\python\lib\pyspark.zip\pyspark\serializers.py", line 715, in read_int
    length = stream.read(4)
  File "C:\ProgramData\Anaconda2\lib\socket.py", line 384, in read
    data = self._sock.recv(left)
timeout: timed out

	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.handlePythonException(PythonRunner.scala:452)
	at org.apache.spark.sql.execution.python.PythonUDFRunner$$anon$1.read(PythonUDFRunner.scala:81)
	at org.apache.spark.sql.execution.python.PythonUDFRunner$$anon$1.read(PythonUDFRunner.scala:64)
	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:406)
	at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage7.processNext(Unknown Source)
	at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
	at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:255)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
	... 1 more
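
For context, a minimal sketch of the kind of PySpark job that produces a collectToPython failure like the one above: a Python UDF whose evaluation is triggered by collect(). Everything here (slow_udf, the range DataFrame, the config values) is illustrative and not taken from the paste; spark.network.timeout and spark.executor.heartbeatInterval are settings sometimes raised when the Python-to-JVM worker socket times out, and whether they help depends on the actual cause (a slow UDF, a firewall or antivirus blocking the local socket, or an overloaded driver).

from pyspark.sql import SparkSession
from pyspark.sql.functions import udf
from pyspark.sql.types import StringType

# Hypothetical reproduction sketch, not the original job.
spark = (
    SparkSession.builder
    .appName("udf-timeout-repro")
    # Illustrative timeout settings; adjust or drop as needed.
    .config("spark.network.timeout", "600s")
    .config("spark.executor.heartbeatInterval", "60s")
    .getOrCreate()
)

@udf(StringType())
def slow_udf(value):
    # Stand-in for whatever per-row work the real UDF does.
    return str(value)

df = spark.range(1000).withColumn("label", slow_udf("id"))
rows = df.collect()  # collect() calls Dataset.collectToPython, where the job above aborted

If the UDF itself is the slow part, raising timeouts only postpones the failure; profiling or vectorizing the UDF is the usual next step.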