Spark Executor Command: "/usr/java/jdk1.7.0_21/bin/java" "-cp" ":/usr/lib/spark-0.9.0/conf:/usr/lib/spark-0.9.0/assembly/target/scala-2.10/spark-assembly_2.10-0.9.0-incubating-hadoop2.0.0-mr1-cdh4.2.0.jar" "-Xms3072M" "-Xmx3072M" "org.apache.spark.executor.CoarseGrainedExecutorBackend" "akka.tcp://spark@jaggu4:44892/user/CoarseGrainedScheduler" "1" "jaggu1" "4" "akka.tcp://sparkWorker@jaggu1:58439/user/Worker" "app-20140221171107-0002"
========================================
log4j:WARN No appenders could be found for logger (akka.event.slf4j.Slf4jLogger).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
14/02/21 17:11:08 INFO CoarseGrainedExecutorBackend: Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
14/02/21 17:11:08 INFO CoarseGrainedExecutorBackend: Connecting to driver: akka.tcp://spark@jaggu4:44892/user/CoarseGrainedScheduler
14/02/21 17:11:08 INFO WorkerWatcher: Connecting to worker akka.tcp://sparkWorker@jaggu1:58439/user/Worker
14/02/21 17:11:08 INFO WorkerWatcher: Successfully connected to akka.tcp://sparkWorker@jaggu1:58439/user/Worker
14/02/21 17:11:08 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
14/02/21 17:11:09 INFO Slf4jLogger: Slf4jLogger started
14/02/21 17:11:09 INFO Remoting: Starting remoting
14/02/21 17:11:09 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://spark@jaggu1:36171]
14/02/21 17:11:09 INFO Remoting: Remoting now listens on addresses: [akka.tcp://spark@jaggu1:36171]
14/02/21 17:11:09 INFO SparkEnv: Connecting to BlockManagerMaster: akka.tcp://spark@jaggu4:44892/user/BlockManagerMaster
14/02/21 17:11:09 INFO DiskBlockManager: Created local directory at /tmp/spark-local-20140221171109-e00e
14/02/21 17:11:09 INFO MemoryStore: MemoryStore started with capacity 1766.4 MB.
14/02/21 17:11:09 INFO ConnectionManager: Bound socket to port 24973 with id = ConnectionManagerId(jaggu1,24973)
14/02/21 17:11:09 INFO BlockManagerMaster: Trying to register BlockManager
14/02/21 17:11:09 INFO BlockManagerMaster: Registered BlockManager
14/02/21 17:11:09 INFO SparkEnv: Connecting to MapOutputTracker: akka.tcp://spark@jaggu4:44892/user/MapOutputTracker
14/02/21 17:11:09 INFO HttpFileServer: HTTP File server directory is /tmp/spark-b61f30d6-1d27-4be0-80e2-57c804a152b5
14/02/21 17:11:09 INFO HttpServer: Starting HTTP Server
14/02/21 17:11:16 INFO CoarseGrainedExecutorBackend: Got assigned task 2
14/02/21 17:11:16 INFO CoarseGrainedExecutorBackend: Got assigned task 4
14/02/21 17:11:16 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
14/02/21 17:11:16 INFO Executor: Running task ID 2
14/02/21 17:11:16 INFO Executor: Running task ID 4
14/02/21 17:11:16 INFO Executor: Fetching http://192.168.0.1:51935/jars/wordcount-assembly-1.1.jar with timestamp 1392982867330
14/02/21 17:11:16 INFO Utils: Fetching http://192.168.0.1:51935/jars/wordcount-assembly-1.1.jar to /tmp/fetchFileTemp8940484103418095162.tmp
14/02/21 17:11:22 INFO Executor: Adding file:/usr/lib/spark-0.9.0/data/app-20140221171107-0002/1/./wordcount-assembly-1.1.jar to class loader
14/02/21 17:11:22 INFO HttpBroadcast: Started reading broadcast variable 0
14/02/21 17:11:22 INFO MemoryStore: ensureFreeSpace(172637) called with curMem=0, maxMem=1852204646
14/02/21 17:11:22 INFO MemoryStore: Block broadcast_0 stored as values to memory (estimated size 168.6 KB, free 1766.2 MB)
14/02/21 17:11:22 INFO HttpBroadcast: Reading broadcast variable 0 took 0.170704003 s
14/02/21 17:11:22 INFO BlockManager: Found block broadcast_0 locally
14/02/21 17:11:22 ERROR Executor: Exception in task ID 2
java.io.EOFException
    at java.io.ObjectInputStream$BlockDataInputStream.readFully(ObjectInputStream.java:2742)
    at java.io.ObjectInputStream.readFully(ObjectInputStream.java:1030)
    at org.apache.hadoop.io.DataOutputBuffer$Buffer.write(DataOutputBuffer.java:68)
    at org.apache.hadoop.io.DataOutputBuffer.write(DataOutputBuffer.java:106)
    at org.apache.hadoop.io.UTF8.readChars(UTF8.java:258)
    at org.apache.hadoop.io.UTF8.readString(UTF8.java:250)
    at org.apache.hadoop.mapred.FileSplit.readFields(FileSplit.java:87)
    at org.apache.hadoop.io.ObjectWritable.readObject(ObjectWritable.java:280)
    at org.apache.hadoop.io.ObjectWritable.readFields(ObjectWritable.java:75)
    at org.apache.spark.SerializableWritable.readObject(SerializableWritable.scala:39)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:601)
    at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1004)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1891)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1348)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1989)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1913)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1348)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
    at org.apache.spark.scheduler.ShuffleMapTask.readExternal(ShuffleMapTask.scala:143)
    at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1835)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1794)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1348)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
    at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
    at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:62)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$run$1.apply$mcV$sp(Executor.scala:195)
    at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:49)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:178)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:722)
14/02/21 17:11:22 ERROR Executor: Exception in task ID 4
java.io.EOFException
    [stack trace identical to task ID 2]
14/02/21 17:11:22 INFO CoarseGrainedExecutorBackend: Got assigned task 5
14/02/21 17:11:22 INFO Executor: Running task ID 5
14/02/21 17:11:22 INFO CoarseGrainedExecutorBackend: Got assigned task 6
14/02/21 17:11:22 INFO Executor: Running task ID 6
14/02/21 17:11:22 INFO BlockManager: Found block broadcast_0 locally
14/02/21 17:11:22 INFO BlockManager: Found block broadcast_0 locally
14/02/21 17:11:22 ERROR Executor: Exception in task ID 5
java.io.EOFException
    [stack trace identical to task ID 2]
14/02/21 17:11:22 ERROR Executor: Exception in task ID 6
java.io.EOFException
    [stack trace identical to task ID 2]
14/02/21 17:11:22 INFO CoarseGrainedExecutorBackend: Got assigned task 7
14/02/21 17:11:22 INFO Executor: Running task ID 7
14/02/21 17:11:22 INFO BlockManager: Found block broadcast_0 locally
14/02/21 17:11:22 ERROR Executor: Exception in task ID 7
java.io.EOFException
    [stack trace identical to task ID 2]
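
Every failed task dies at the same point: while deserializing the ShuffleMapTask, ObjectInputStream hits end-of-stream inside org.apache.hadoop.mapred.FileSplit.readFields, i.e. while reading the split back with Hadoop's legacy UTF8 string codec. This trace commonly points to a Hadoop client mismatch: the FileSplit was serialized with one Hadoop version's wire format and is being read with another, typically because the application assembly jar bundles its own hadoop-client classes alongside the cluster's spark-assembly_2.10-0.9.0-incubating-hadoop2.0.0-mr1-cdh4.2.0.jar. If that is the cause here, one fix is to keep Spark and Hadoop classes out of the application jar entirely. The build.sbt below is a minimal sketch of that approach; the version strings, Scala version, and repository URL are assumptions inferred from the jar names in the log, not taken from the original build.

name := "wordcount"

version := "1.1"

scalaVersion := "2.10.3"  // assumed: the Scala version Spark 0.9.0 was built for

// "provided" compiles the job against the same Hadoop wire format the
// executors use, but leaves these classes out of wordcount-assembly-1.1.jar,
// so the cluster's own copies are the only ones on the executor classpath.
libraryDependencies ++= Seq(
  "org.apache.spark"  %% "spark-core"    % "0.9.0-incubating"   % "provided",
  "org.apache.hadoop" %  "hadoop-client" % "2.0.0-mr1-cdh4.2.0" % "provided"  // assumed CDH 4.2.0 (MR1) cluster
)

// CDH artifacts are published to Cloudera's repository, not Maven Central.
resolvers += "cloudera" at "https://repository.cloudera.com/artifactory/cloudera-repos/"

If the Hadoop versions already match, the same EOFException can also appear when the driver and the workers run different Spark builds; comparing the assembly jar on each node against the one in the executor command at the top of this log is a quick way to rule that out.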