ERROR Executor: Exception in task 1.0 in stage 5.0 (TID 11)
java.lang.NegativeArraySizeException
    at org.apache.spark.unsafe.types.UTF8String.getBytes(UTF8String.java:297)
    at org.apache.spark.unsafe.types.UTF8String.toString(UTF8String.java:1214)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator$$anonfun$org$apache$spark$sql$catalyst$json$JacksonGenerator$$makeWriter$9.apply(JacksonGenerator.scala:112)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator$$anonfun$org$apache$spark$sql$catalyst$json$JacksonGenerator$$makeWriter$9.apply(JacksonGenerator.scala:111)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator.org$apache$spark$sql$catalyst$json$JacksonGenerator$$writeFields(JacksonGenerator.scala:176)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator$$anonfun$write$1.apply$mcV$sp(JacksonGenerator.scala:228)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator.org$apache$spark$sql$catalyst$json$JacksonGenerator$$writeObject(JacksonGenerator.scala:165)
    at org.apache.spark.sql.catalyst.json.JacksonGenerator.write(JacksonGenerator.scala:228)
    at org.apache.spark.sql.Dataset$$anonfun$toJSON$1$$anon$1.next(Dataset.scala:3203)
    at org.apache.spark.sql.Dataset$$anonfun$toJSON$1$$anon$1.next(Dataset.scala:3200)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636)
    at org.apache.spark.sql.kafka010.KafkaWriteTask.execute(KafkaWriteTask.scala:45)
    at org.apache.spark.sql.kafka010.KafkaWriter$$anonfun$write$1$$anonfun$apply$1.apply$mcV$sp(KafkaWriter.scala:89)
    at org.apache.spark.sql.kafka010.KafkaWriter$$anonfun$write$1$$anonfun$apply$1.apply(KafkaWriter.scala:89)
    at org.apache.spark.sql.kafka010.KafkaWriter$$anonfun$write$1$$anonfun$apply$1.apply(KafkaWriter.scala:89)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.sql.kafka010.KafkaWriter$$anonfun$write$1.apply(KafkaWriter.scala:89)
    at org.apache.spark.sql.kafka010.KafkaWriter$$anonfun$write$1.apply(KafkaWriter.scala:87)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$28.apply(RDD.scala:935)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$28.apply(RDD.scala:935)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
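
The failure happens while Dataset.toJSON output is serialized by JacksonGenerator and handed to the Kafka batch sink (KafkaWriteTask). A NegativeArraySizeException inside UTF8String.getBytes usually points to a corrupted string offset/length in Spark's unsafe row memory (for example after a bad serialization round-trip) rather than malformed input data. Below is a minimal sketch of the kind of write path the trace implies; it assumes an existing SparkSession named spark, and the source path, broker address, and topic name are placeholders:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

Dataset<Row> events = spark.read().parquet("/path/to/input"); // hypothetical source
events.toJSON()                                    // one JSON string per row, in a column named "value"
      .write()
      .format("kafka")
      .option("kafka.bootstrap.servers", "broker:9092")  // placeholder broker
      .option("topic", "events")                         // placeholder topic
      .save();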
- .set("spark.serializer", KryoSerializer.class.getCanonicalName())
- .set("spark.kryo.registrationRequired", "true")
- .set("es.batch.size.entries", "1500")
- .set("spark.kryo.registrator", "...CustomKryoRegistrator")
import com.esotericsoftware.kryo.Kryo;
import org.apache.spark.serializer.KryoRegistrator;
import org.apache.spark.sql.types.*;

public class CustomKryoRegistrator implements KryoRegistrator {
    @Override
    public void registerClasses(Kryo kryo) {
        // Schema types this job serializes; registration is mandatory
        // because spark.kryo.registrationRequired is "true".
        kryo.register(StructType[].class);
        kryo.register(StructType.class);
        kryo.register(StructField[].class);
        kryo.register(StructField.class);
        kryo.register(IntegerType$.class);
        kryo.register(Metadata.class);
        kryo.register(StringType$.class);
        kryo.register(LongType$.class);
        kryo.register(BooleanType$.class);
        kryo.register(ArrayType.class);
    }
}
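
Usage note: spark.kryo.registrator expects the registrator's fully qualified class name (the "..." in the pasted conf elides the real package and is left as-is above). A less typo-prone way to set it, assuming CustomKryoRegistrator is on both the driver and executor classpath:

conf.set("spark.kryo.registrator", CustomKryoRegistrator.class.getName());

Because spark.kryo.registrationRequired is "true", Kryo throws for any class it encounters that is not registered, so every type the job actually serializes (including array forms such as StructField[]) must appear in registerClasses.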