Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/Users/marwanbaghdad/miniconda3/envs/pyspark/lib/python3.6/site-packages/pyspark/sql/dataframe.py", line 378, in show
    print(self._jdf.showString(n, 20, vertical))
  File "/Users/marwanbaghdad/miniconda3/envs/pyspark/lib/python3.6/site-packages/pyspark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py", line 1257, in __call__
  File "/Users/marwanbaghdad/miniconda3/envs/pyspark/lib/python3.6/site-packages/pyspark/sql/utils.py", line 63, in deco
    return f(*a, **kw)
  File "/Users/marwanbaghdad/miniconda3/envs/pyspark/lib/python3.6/site-packages/pyspark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py", line 328, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o302.showString.
: org.apache.spark.SparkException: Exception thrown in awaitResult:
    at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:226)
    at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:146)
    at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:387)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:144)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:140)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:140)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:117)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenInner(BroadcastHashJoinExec.scala:211)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:101)
    at org.apache.spark.sql.execution.CodegenSupport$class.constructDoConsumeFunction(WholeStageCodegenExec.scala:216)
    at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:187)
    at org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:35)
    at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:65)
    at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:189)
    at org.apache.spark.sql.execution.GlobalLimitExec.consume(limit.scala:108)
    at org.apache.spark.sql.execution.BaseLimitExec$class.doConsume(limit.scala:87)
    at org.apache.spark.sql.execution.GlobalLimitExec.doConsume(limit.scala:108)
    at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:189)
    at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:374)
    at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:403)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:374)
    at org.apache.spark.sql.execution.BaseLimitExec$class.doProduce(limit.scala:70)
    at org.apache.spark.sql.execution.GlobalLimitExec.doProduce(limit.scala:108)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.GlobalLimitExec.produce(limit.scala:108)
    at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:45)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:35)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:96)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:96)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)
    at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:45)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:90)
    at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:85)
    at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:35)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:544)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:598)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
    at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:339)
    at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:38)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3383)
    at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2544)
    at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2544)
    at org.apache.spark.sql.Dataset$$anonfun$53.apply(Dataset.scala:3364)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3363)
    at org.apache.spark.sql.Dataset.head(Dataset.scala:2544)
    at org.apache.spark.sql.Dataset.take(Dataset.scala:2758)
    at org.apache.spark.sql.Dataset.getRows(Dataset.scala:254)
    at org.apache.spark.sql.Dataset.showString(Dataset.scala:291)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:567)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.base/java.lang.Thread.run(Thread.java:835)
Caused by: java.lang.IllegalArgumentException: Unsupported class file major version 56
    at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:166)
    at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:148)
    at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:136)
    at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:237)
    at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:49)
    at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:517)
    at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:500)
    at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
    at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
    at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236)
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
    at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:134)
    at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
    at org.apache.spark.util.FieldAccessFinder$$anon$3.visitMethodInsn(ClosureCleaner.scala:500)
    at org.apache.xbean.asm6.ClassReader.readCode(ClassReader.java:2175)
    at org.apache.xbean.asm6.ClassReader.readMethod(ClassReader.java:1238)
    at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:631)
    at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:355)
    at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:307)
    at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:306)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:306)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:162)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2326)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2100)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:945)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:944)
    at org.apache.spark.sql.execution.SparkPlan.executeCollectIterator(SparkPlan.scala:306)
    at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:79)
    at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:76)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withExecutionId$1.apply(SQLExecution.scala:101)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:98)
    at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:75)
    at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:75)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    ... 1 more
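
The decisive line is the final "Caused by": java.lang.IllegalArgumentException: Unsupported class file major version 56. Class file major version 56 is the Java 12 bytecode format, and the shaded ASM 6 (org.apache.xbean.asm6) used by Spark's ClosureCleaner in this build (py4j 0.10.7 is consistent with Spark 2.4.x) cannot parse it; Spark 2.4 is built and tested against Java 8. The usual fixes are to run the driver on a Java 8 JDK or to upgrade to a Spark release that supports newer JVMs. Below is a minimal sketch of the first option, assuming a Java 8 JDK is installed locally; the JAVA8_HOME path and the tiny DataFrames are placeholders, not part of the original session.

import os

# Assumed location of a Java 8 JDK on this machine -- adjust to your install.
JAVA8_HOME = "/Library/Java/JavaVirtualMachines/adoptopenjdk-8.jdk/Contents/Home"
# Must be set before the py4j gateway launches the JVM (i.e. before getOrCreate()).
os.environ["JAVA_HOME"] = JAVA8_HOME

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("broadcast-join-check").getOrCreate()

# Re-create the failing pattern from the traceback: a broadcast hash join
# followed by show(). On a Java 8 JVM the ClosureCleaner can read the class
# files, so the broadcast exchange completes instead of raising Py4JJavaError.
left = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "v1"])
right = spark.createDataFrame([(1, "x"), (2, "y")], ["id", "v2"])
left.join(right, "id").show()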