- [info] - create Dataset from Fixed Generic Record *** FAILED ***
- [info] org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 99.0 failed 1 times, most recent failure: Lost task 1.0 in stage 99.0 (TID 187, localhost): java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
- [info] named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null) AS fixedUnionVal#2352
- [info] +- named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null)
- [info] :- member0
- [info] :- if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null
- [info] : :- (0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)
- [info] : : :- 0
- [info] : : +- com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion
- [info] : : :- com.databricks.spark.avro.SerializableSchema@6ee1aeaf
- [info] : : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : : +- 0
- [info] : :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes
- [info] : : +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer))
- [info] : : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : : +- 0
- [info] : +- null
- [info] :- member1
- [info] +- if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null
- [info] :- (1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)
- [info] : :- 1
- [info] : +- com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion
- [info] : :- com.databricks.spark.avro.SerializableSchema@45b82eb6
- [info] : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : +- 0
- [info] :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue
- [info] : +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer))
- [info] : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : +- 0
- [info] +- null
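For context, the expression tree above is the serializer Catalyst built for an Avro union of a fixed type and an int: named_struct carries one member per union branch, resolveUnion picks the branch index at runtime, and the matching branch invokes an accessor (.bytes for the fixed branch, .intValue for the int branch). A minimal sketch of the kind of record that exercises this path (hypothetical; the field name fixedUnionVal comes from the plan above, everything else is an assumption):

    import org.apache.avro.Schema
    import org.apache.avro.generic.GenericData

    // Record with one field whose type is a union of fixed(2) and int.
    val schemaJson =
      """{"type": "record", "name": "Rec", "fields": [
        |  {"name": "fixedUnionVal",
        |   "type": [{"type": "fixed", "name": "fixed2", "size": 2}, "int"]}
        |]}""".stripMargin
    val schema: Schema = new Schema.Parser().parse(schemaJson)

    val rec = new GenericData.Record(schema)
    val fixedSchema = schema.getField("fixedUnionVal").schema.getTypes.get(0)
    rec.put("fixedUnionVal", new GenericData.Fixed(fixedSchema, Array[Byte](1, 2)))
    // Turning records like rec into a Dataset is what the failing test does
    // when it hits the error reported above.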
- [info]
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
- [info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
- [info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
- [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
- [info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
- [info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
- [info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
- [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- [info] at java.lang.Thread.run(Thread.java:745)
- [info] Caused by: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
- [info] at scala.sys.package$.error(package.scala:27)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method$lzycompute(objects.scala:124)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method(objects.scala:120)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.doGenCode(objects.scala:137)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.If.doGenCode(conditionalExpressions.scala:60)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:302)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:301)
- [info] at scala.collection.immutable.List.map(List.scala:273)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct.doGenCode(complexTypeCreator.scala:301)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
- [info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
- [info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
- [info] at scala.collection.immutable.List.foreach(List.scala:381)
- [info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
- [info] at scala.collection.immutable.List.map(List.scala:285)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpressions(CodeGenerator.scala:740)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.createCode(GenerateUnsafeProjection.scala:299)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:363)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:356)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:821)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:252)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:252)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276)
- [info] ... 17 more
- [info]
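The root cause surfaces at code generation time: Invoke resolves its target method lazily (objects.scala:124 above) via a reflective lookup, and the serializer asked for bytes on java.nio.ByteBuffer. ByteBuffer has no bytes accessor: heap buffers expose array(), while the bytes() method the fixed branch presumably wants lives on org.apache.avro.generic.GenericFixed. This suggests (an inference from the plan, not something the log states) that the fixed branch of the union was mapped to the wrong JVM type. A rough sketch of the failing lookup (a simplification, not the actual Catalyst source):

    import java.nio.ByteBuffer

    val cls = classOf[ByteBuffer]
    // Reflective method resolution, erroring out with the same message as
    // the log when the accessor does not exist on the target class.
    cls.getMethods.find(_.getName == "bytes")
      .getOrElse(sys.error(s"Couldn't find bytes on $cls"))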
- [info] Driver stacktrace:
- [info] at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1450)
- [info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1438)
- [info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1437)
- [info] at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
- [info] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
- [info] at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1437)
- [info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
- [info] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
- [info] at scala.Option.foreach(Option.scala:257)
- [info] at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:811)
- [info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1659)
- [info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1618)
- [info] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1607)
- [info] at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
- [info] at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:632)
- [info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1871)
- [info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1884)
- [info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1897)
- [info] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1911)
- [info] at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:893)
- [info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
- [info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
- [info] at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
- [info] at org.apache.spark.rdd.RDD.collect(RDD.scala:892)
- [info] at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:290)
- [info] at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183)
- [info] at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
- [info] at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532)
- [info] at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182)
- [info] at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189)
- [info] at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2217)
- [info] at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2216)
- [info] at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2545)
- [info] at org.apache.spark.sql.Dataset.count(Dataset.scala:2216)
- [info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply$mcV$sp(AvroSuite.scala:994)
- [info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply(AvroSuite.scala:962)
- [info] at com.databricks.spark.avro.AvroSuite$$anonfun$39.apply(AvroSuite.scala:962)
- [info] at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
- [info] at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
- [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
- [info] at org.scalatest.Transformer.apply(Transformer.scala:22)
- [info] at org.scalatest.Transformer.apply(Transformer.scala:20)
- [info] at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
- [info] at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
- [info] at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
- [info] at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
- [info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
- [info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
- [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
- [info] at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
- [info] at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
- [info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
- [info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
- [info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
- [info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
- [info] at scala.collection.immutable.List.foreach(List.scala:381)
- [info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
- [info] at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
- [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
- [info] at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
- [info] at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
- [info] at org.scalatest.Suite$class.run(Suite.scala:1424)
- [info] at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
- [info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
- [info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
- [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
- [info] at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
- [info] at com.databricks.spark.avro.AvroSuite.org$scalatest$BeforeAndAfterAll$$super$run(AvroSuite.scala:43)
- [info] at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
- [info] at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
- [info] at com.databricks.spark.avro.AvroSuite.run(AvroSuite.scala:43)
- [info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
- [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
- [info] at sbt.TestRunner.runTest$1(TestFramework.scala:76)
- [info] at sbt.TestRunner.run(TestFramework.scala:85)
- [info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
- [info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
- [info] at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
- [info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
- [info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
- [info] at sbt.TestFunction.apply(TestFramework.scala:207)
- [info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
- [info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
- [info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
- [info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
- [info] at sbt.std.Transform$$anon$4.work(System.scala:63)
- [info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
- [info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
- [info] at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
- [info] at sbt.Execute.work(Execute.scala:237)
- [info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
- [info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
- [info] at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
- [info] at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
- [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
- [info] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
- [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
- [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- [info] at java.lang.Thread.run(Thread.java:745)
- [info] Cause: java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
- [info] named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null) AS fixedUnionVal#2352
- [info] +- named_struct(member0, if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null, member1, if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null)
- [info] :- member0
- [info] :- if ((0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes else null
- [info] : :- (0 = com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion)
- [info] : : :- 0
- [info] : : +- com.databricks.spark.avro.SerializableSchema@6ee1aeaf.resolveUnion
- [info] : : :- com.databricks.spark.avro.SerializableSchema@6ee1aeaf
- [info] : : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : : +- 0
- [info] : :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer)).bytes
- [info] : : +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.nio.ByteBuffer))
- [info] : : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : : +- 0
- [info] : +- null
- [info] :- member1
- [info] +- if ((1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)) objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue else null
- [info] :- (1 = com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion)
- [info] : :- 1
- [info] : +- com.databricks.spark.avro.SerializableSchema@45b82eb6.resolveUnion
- [info] : :- com.databricks.spark.avro.SerializableSchema@45b82eb6
- [info] : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : +- 0
- [info] :- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer)).intValue
- [info] : +- objectcast(input[0, org.apache.avro.generic.GenericData$Record, true].get, ObjectType(class java.lang.Integer))
- [info] : +- input[0, org.apache.avro.generic.GenericData$Record, true].get
- [info] : :- input[0, org.apache.avro.generic.GenericData$Record, true]
- [info] : +- 0
- [info] +- null
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
- [info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
- [info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
- [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
- [info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
- [info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
- [info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
- [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- [info] at java.lang.Thread.run(Thread.java:745)
- [info] Cause: java.lang.RuntimeException: Couldn't find bytes on class java.nio.ByteBuffer
- [info] at scala.sys.package$.error(package.scala:27)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method$lzycompute(objects.scala:124)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.method(objects.scala:120)
- [info] at org.apache.spark.sql.catalyst.expressions.objects.Invoke.doGenCode(objects.scala:137)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.If.doGenCode(conditionalExpressions.scala:60)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:302)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct$$anonfun$doGenCode$5.apply(complexTypeCreator.scala:301)
- [info] at scala.collection.immutable.List.map(List.scala:273)
- [info] at org.apache.spark.sql.catalyst.expressions.CreateNamedStruct.doGenCode(complexTypeCreator.scala:301)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
- [info] at scala.Option.getOrElse(Option.scala:121)
- [info] at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext$$anonfun$generateExpressions$1.apply(CodeGenerator.scala:740)
- [info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
- [info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
- [info] at scala.collection.immutable.List.foreach(List.scala:381)
- [info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
- [info] at scala.collection.immutable.List.map(List.scala:285)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpressions(CodeGenerator.scala:740)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.createCode(GenerateUnsafeProjection.scala:299)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:363)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:356)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
- [info] at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:821)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:252)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:252)
- [info] at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:439)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.agg_doAggregateWithoutKey$(generated.java:39)
- [info] at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(generated.java:60)
- [info] at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
- [info] at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
- [info] at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369)
- [info] at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
- [info] at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
- [info] at org.apache.spark.scheduler.Task.run(Task.scala:85)
- [info] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
- [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- [info] at java.lang.Thread.run(Thread.java:745)
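A possible workaround sketch while the encoder mishandles fixed union branches (an assumption on my part, not a fix taken from this log): unwrap fixed values into plain byte arrays before handing records to Spark, so the broken .bytes Invoke is never generated:

    import org.apache.avro.generic.GenericFixed

    // Convert a union value from Avro's runtime types to plain JVM types
    // that Spark's default encoders handle without reflecting on
    // ByteBuffer (this helper and its name are hypothetical).
    def unionToScala(v: AnyRef): Any = v match {
      case f: GenericFixed      => f.bytes()     // fixed branch -> Array[Byte]
      case i: java.lang.Integer => i.intValue()  // int branch   -> Int
      case other                => other
    }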