mahout> val drmData = drmParallelize(dense(
     | (2, 2, 10.5, 10, 29.509541), // Apple Cinnamon Cheerios
     | (1, 2, 12, 12, 18.042851), // Cap'n'Crunch
     | (1, 1, 12, 13, 22.736446), // Cocoa Puffs
     | (2, 1, 11, 13, 32.207582), // Froot Loops
     | (1, 2, 12, 11, 21.871292), // Honey Graham Ohs
     | (2, 1, 16, 8, 36.187559), // Wheaties Honey Gold
     | (6, 2, 17, 1, 50.764999), // Cheerios
     | (3, 2, 13, 7, 40.400208), // Clusters
     | (3, 3, 13, 4, 45.811716)), // Great Grains Pecan
     | numPartitions = 2);
drmData: org.apache.mahout.math.drm.CheckpointedDrm[Int] = org.apache.mahout.sparkbindings.drm.CheckpointedDrmSpark@8025f63
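The matrix holds one row per cereal; judging by the slice that follows, the first four columns are used as features and the fifth as the target value for a regression. The same slicing can be checked entirely in-core with Mahout's Scala DSL before Spark is involved at all. This is only a minimal sketch, assuming the org.apache.mahout.math.scalabindings and RLikeOps imports that the Mahout shell normally pre-loads:

import org.apache.mahout.math.scalabindings._
import RLikeOps._

// Small in-core matrix with the same layout: four feature columns plus a rating.
val inCore = dense(
  (2, 2, 10.5, 10, 29.509541),  // Apple Cinnamon Cheerios
  (1, 2, 12, 12, 18.042851),    // Cap'n'Crunch
  (6, 2, 17, 1, 50.764999))     // Cheerios

// Column slices analogous to drmData(::, 0 until 4).
val x = inCore(::, 0 until 4)   // feature columns
val y = inCore(::, 4 until 5)   // target column, kept as a one-column matrix

println(x)
println(y)

If the in-core version behaves as expected, the distributed failure below is more likely an environment problem than a misuse of the slicing operator.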
mahout> val drmX = drmData(::, 0 until 4)
14/08/14 15:10:47 INFO SparkContext: Starting job: fold at CheckpointedDrmSpark.scala:192
14/08/14 15:10:47 INFO DAGScheduler: Got job 0 (fold at CheckpointedDrmSpark.scala:192) with 2 output partitions (allowLocal=false)
14/08/14 15:10:47 INFO DAGScheduler: Final stage: Stage 0(fold at CheckpointedDrmSpark.scala:192)
14/08/14 15:10:47 INFO DAGScheduler: Parents of final stage: List()
14/08/14 15:10:47 INFO DAGScheduler: Missing parents: List()
14/08/14 15:10:47 INFO DAGScheduler: Submitting Stage 0 (MappedRDD[1] at map at CheckpointedDrmSpark.scala:192), which has no missing parents
14/08/14 15:10:47 INFO DAGScheduler: Submitting 2 missing tasks from Stage 0 (MappedRDD[1] at map at CheckpointedDrmSpark.scala:192)
14/08/14 15:10:47 INFO TaskSchedulerImpl: Adding task set 0.0 with 2 tasks
14/08/14 15:10:47 INFO TaskSetManager: Starting task 0.0:0 as TID 0 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:47 INFO TaskSetManager: Serialized task 0.0:0 as 1969 bytes in 9 ms
14/08/14 15:10:47 INFO TaskSetManager: Starting task 0.0:1 as TID 1 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:47 INFO TaskSetManager: Serialized task 0.0:1 as 2022 bytes in 4 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 1 (task 0.0:1)
14/08/14 15:10:48 WARN TaskSetManager: Loss was due to java.lang.ArrayStoreException
java.lang.ArrayStoreException: scala.Tuple3
        at com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:338)
        at com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:293)
        at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:729)
        at com.twitter.chill.WrappedArraySerializer.read(WrappedArraySerializer.scala:34)
        at com.twitter.chill.WrappedArraySerializer.read(WrappedArraySerializer.scala:21)
        at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:729)
        at org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:118)
        at org.apache.spark.rdd.ParallelCollectionPartition$$anonfun$readObject$1.apply(ParallelCollectionRDD.scala:80)
        at org.apache.spark.rdd.ParallelCollectionPartition$$anonfun$readObject$1.apply(ParallelCollectionRDD.scala:80)
        at org.apache.spark.util.Utils$.deserializeViaNestedStream(Utils.scala:120)
        at org.apache.spark.rdd.ParallelCollectionPartition.readObject(ParallelCollectionRDD.scala:80)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        at java.io.ObjectInputStream.skipCustomData(ObjectInputStream.java:1956)
        at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1850)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
        at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:165)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:744)
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 0 (task 0.0:0)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 1]
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:0 as TID 2 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:0 as 1969 bytes in 7 ms
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:1 as TID 3 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:1 as 2022 bytes in 7 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 2 (task 0.0:0)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 2]
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:0 as TID 4 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:0 as 1969 bytes in 4 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 3 (task 0.0:1)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 3]
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:1 as TID 5 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:1 as 2022 bytes in 10 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 4 (task 0.0:0)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 4]
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:0 as TID 6 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:0 as 1969 bytes in 7 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 5 (task 0.0:1)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 5]
14/08/14 15:10:48 INFO TaskSetManager: Starting task 0.0:1 as TID 7 on executor 0: vagrant-ubuntu-trusty-64 (PROCESS_LOCAL)
14/08/14 15:10:48 INFO TaskSetManager: Serialized task 0.0:1 as 2022 bytes in 4 ms
14/08/14 15:10:48 WARN TaskSetManager: Lost TID 6 (task 0.0:0)
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 6]
14/08/14 15:10:48 ERROR TaskSetManager: Task 0.0:0 failed 4 times; aborting job
14/08/14 15:10:48 INFO TaskSchedulerImpl: Cancelling stage 0
14/08/14 15:10:48 INFO TaskSchedulerImpl: Stage 0 was cancelled
14/08/14 15:10:48 INFO TaskSetManager: Loss was due to java.lang.ArrayStoreException: scala.Tuple3 [duplicate 7]
14/08/14 15:10:48 INFO DAGScheduler: Failed to run fold at CheckpointedDrmSpark.scala:192
14/08/14 15:10:48 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 failed 4 times, most recent failure: Exception failure in TID 6 on host vagrant-ubuntu-trusty-64: java.lang.ArrayStoreException: scala.Tuple3
        com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:338)
        com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:293)
        com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:729)
        com.twitter.chill.WrappedArraySerializer.read(WrappedArraySerializer.scala:34)
        com.twitter.chill.WrappedArraySerializer.read(WrappedArraySerializer.scala:21)
        com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:729)
        org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:118)
        org.apache.spark.rdd.ParallelCollectionPartition$$anonfun$readObject$1.apply(ParallelCollectionRDD.scala:80)
        org.apache.spark.rdd.ParallelCollectionPartition$$anonfun$readObject$1.apply(ParallelCollectionRDD.scala:80)
        org.apache.spark.util.Utils$.deserializeViaNestedStream(Utils.scala:120)
        org.apache.spark.rdd.ParallelCollectionPartition.readObject(ParallelCollectionRDD.scala:80)
        sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        java.lang.reflect.Method.invoke(Method.java:606)
        java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.skipCustomData(ObjectInputStream.java:1956)
        java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1850)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
        org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
        org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:165)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        java.lang.Thread.run(Thread.java:744)
Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1044)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1028)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1026)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1026)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:634)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1229)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
        at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
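Every attempt fails at the same point: the executor is deserializing the parallelized partition (ParallelCollectionPartition.readObject) when Kryo, going through chill's WrappedArraySerializer, throws ArrayStoreException: scala.Tuple3. Nothing in the trace implicates the drmData(::, 0 until 4) expression itself; the task dies before the fold ever runs, which points at the serializer setup or at mismatched Kryo/chill/Scala jars between the Mahout shell and the Spark installation. The sketch below lists the Kryo properties worth double-checking when building the Spark context; the registrator class name is an assumption about the Spark-bindings module, not a verified fix for this exact failure:

import org.apache.spark.SparkConf

// Hedged sketch: the serializer settings the Mahout Spark bindings are
// expected to run with. MahoutKryoRegistrator is assumed to live in
// org.apache.mahout.sparkbindings.io; verify against the jars actually
// on the classpath before relying on it.
val conf = new SparkConf()
  .setAppName("mahout-shell-debug")
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryo.registrator",
    "org.apache.mahout.sparkbindings.io.MahoutKryoRegistrator")

If those properties are already in place, the next thing to compare is the Kryo, chill, and Scala versions pulled in by Mahout against the ones shipped with the Spark install, since mismatched versions in that stack are a plausible source of this kind of deserialization error.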