[info] - create Dataset from Fixed Generic Record *** FAILED ***
[info] org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 99.0 failed 1 times, most recent failure: Lost task 1.0 in stage 99.0 (TID 187, localhost): java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
[info] named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null) AS fixedUnionVal#2352
[info] +- named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null)
[info]    :- member0
[info]    :- if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null
[info]    :  :- (0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)
[info]    :  :  :- 0
[info]    :  :  +- com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion
[info]    :  :     :- com.databricks.spark.avro.SerializableSchema@6ee1aeaf
[info]    :  :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]    :  :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]    :  :        +- 0
[info]    :  :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes
[info]    :  :  +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer))
[info]    :  :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]    :  :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]    :  :        +- 0
[info]    :  +- null
[info]    :- member1
[info]    +- if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null
[info]       :- (1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)
[info]       :  :- 1
[info]       :  +- com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion
[info]       :     :- com.databricks.spark.avro.SerializableSchema@45b82eb6
[info]       :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]       :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]       :        +- 0
[info]       :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue
[info]       :  +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer))
[info]       :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]       :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]       :        +- 0
[info]       +- null
[info]
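What the serializer tree above attempts: for the first union branch (member0) it casts the field value to java.nio.ByteBuffer and then invokes a member named bytes on it. Spark's Invoke expression resolves that member reflectively at code-generation time (objects.scala:124 in the trace below), and ByteBuffer exposes array() and get() but nothing named bytes; a bytes() accessor does exist on org.apache.avro.generic.GenericFixed, the runtime class of an Avro fixed value. A minimal sketch of the failing lookup, independent of Spark (the object name is made up for illustration):

import java.nio.ByteBuffer
import org.apache.avro.generic.GenericFixed

object InvokeLookupSketch {
  def main(args: Array[String]): Unit = {
    // GenericFixed declares bytes(), so this reflective lookup succeeds:
    println(classOf[GenericFixed].getMethod("bytes"))
    // ByteBuffer does not, so this throws NoSuchMethodException -- the
    // same missing member that Spark's Invoke surfaces as
    // "Couldn't find bytes on class java.nio.ByteBuffer":
    println(classOf[ByteBuffer].getMethod("bytes"))
  }
}

In other words, the encoder built for the fixed branch of this union appears to assume Avro's bytes representation (ByteBuffer) where the schema's fixed representation (GenericFixed) is what actually arrives.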
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
[info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
[info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
[info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
[info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
[info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
[info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
[info] Caused by: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
[info] at scala.sys.package$.error(package.scala:27)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method$lzycompute(objects.scala:124)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method(objects.scala:120)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.doGenCode(objects.scala:137)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.If.doGenCode(conditionalExpressions.scala:60)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:302)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:301)
[info] at scala.collection.immutable.List.map(List.scala:273)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct.doGenCode(complexTypeCreator.scala:301)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
[info] at scala.collection.immutable.List.foreach(List.scala:381)
[info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
[info] at scala.collection.immutable.List.map(List.scala:285)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpressions(CodeGenerator.scala:740)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.createCode(GenerateUnsafeProjection.scala:299)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:363)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:356)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:821)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:252)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:252)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276)
[info] ... 17 more
[info]
[info] Driver stacktrace:
[info] at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1450)
[info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1438)
[info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1437)
[info] at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
[info] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
[info] at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1437)
[info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
[info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
[info] at scala.Option.foreach(Option.scala:257)
[info] at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:811)
[info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1659)
[info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1618)
[info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1607)
[info] at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
[info] at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:632)
[info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1871)
[info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1884)
[info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1897)
[info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1911)
[info] at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:893)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[info] at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
[info] at org.apache.spark.rdd.RDD.collect(RDD.scala:892)
[info] at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:290)
[info] at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183)
[info] at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
[info] at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532)
[info] at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182)
[info] at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189)
[info] at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2217)
[info] at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2216)
[info] at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2545)
[info] at org.apache.spark.sql.Dataset.count(Dataset.scala:2216)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply$mcV$sp(AvroSuite.scala:994)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply(AvroSuite.scala:962)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply(AvroSuite.scala:962)
[info] at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
[info] at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
[info] at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
[info] at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
[info] at scala.collection.immutable.List.foreach(List.scala:381)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
[info] at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
[info] at org.scalatest.Suite$class.run(Suite.scala:1424)
[info] at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
[info] at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
[info] at com.databricks.spark.avro.AvroSuite.org$scalatest$BeforeAndAfterAll$$super$run(AvroSuite.scala:43)
[info] at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
[info] at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
[info] at com.databricks.spark.avro.AvroSuite.run(AvroSuite.scala:43)
[info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
[info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
[info] at sbt.TestRunner.runTest$1(TestFramework.scala:76)
[info] at sbt.TestRunner.run(TestFramework.scala:85)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFunction.apply(TestFramework.scala:207)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$4.work(System.scala:63)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
[info] at sbt.Execute.work(Execute.scala:237)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
[info] at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
[info] Cause: java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
[info] named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null) AS fixedUnionVal#2352
[info] +- named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null)
[info]    :- member0
[info]    :- if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null
[info]    :  :- (0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)
[info]    :  :  :- 0
[info]    :  :  +- com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion
[info]    :  :     :- com.databricks.spark.avro.SerializableSchema@6ee1aeaf
[info]    :  :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]    :  :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]    :  :        +- 0
[info]    :  :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes
[info]    :  :  +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer))
[info]    :  :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]    :  :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]    :  :        +- 0
[info]    :  +- null
[info]    :- member1
[info]    +- if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null
[info]       :- (1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)
[info]       :  :- 1
[info]       :  +- com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion
[info]       :     :- com.databricks.spark.avro.SerializableSchema@45b82eb6
[info]       :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]       :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]       :        +- 0
[info]       :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue
[info]       :  +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer))
[info]       :     +- input[0, org.apache.avro.generic.GenericData$Record, true].get
[info]       :        :- input[0, org.apache.avro.generic.GenericData$Record, true]
[info]       :        +- 0
[info]       +- null
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
[info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
[info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
[info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
[info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
[info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
[info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
[info] Cause: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
[info] at scala.sys.package$.error(package.scala:27)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method$lzycompute(objects.scala:124)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method(objects.scala:120)
[info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.doGenCode(objects.scala:137)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.If.doGenCode(conditionalExpressions.scala:60)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:302)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:301)
[info] at scala.collection.immutable.List.map(List.scala:273)
[info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct.doGenCode(complexTypeCreator.scala:301)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
[info] at scala.Option.getOrElse(Option.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
[info] at scala.collection.immutable.List.foreach(List.scala:381)
[info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
[info] at scala.collection.immutable.List.map(List.scala:285)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpressions(CodeGenerator.scala:740)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.createCode(GenerateUnsafeProjection.scala:299)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:363)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:356)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
[info] at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:821)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:252)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:252)
[info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
[info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
[info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
[info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
[info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
[info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
[info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
[info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
[info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
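For context, a hedged sketch of the kind of input the failing test ("create Dataset from Fixed Generic Record", AvroSuite.scala:962 in the driver stacktrace) presumably encodes: a generic record whose single field, fixedUnionVal as in the serializer tree, holds a union of a fixed type and an int (member0 and member1). The schema shape, the names other than fixedUnionVal, and the 4-byte size are reconstructions from the log, not the suite's actual code:

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericRecord}

object FixedUnionRecordSketch {
  // Union of a 4-byte fixed and an int, mirroring member0/member1 above.
  val schemaJson: String =
    """{"type": "record", "name": "TestRecord", "fields": [
      |  {"name": "fixedUnionVal",
      |   "type": [{"type": "fixed", "name": "Four", "size": 4}, "int"]}
      |]}""".stripMargin

  val schema: Schema = new Schema.Parser().parse(schemaJson)

  def main(args: Array[String]): Unit = {
    // The first branch of the union is the fixed schema.
    val fixedSchema = schema.getField("fixedUnionVal").schema().getTypes.get(0)
    val record: GenericRecord = new GenericData.Record(schema)
    record.put("fixedUnionVal",
      new GenericData.Fixed(fixedSchema, Array[Byte](1, 2, 3, 4)))
    println(record)
  }
}

Running .count() on a Dataset built from such records (Dataset.scala:2216 in the driver stacktrace) forces the encoder's extract projection, which is where the codegen-time method lookup above fails.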