Spark Executor Command: "/usr/java/latest/bin/java" "-cp" "::/opt/spark/conf:/opt/spark/lib/spark-assembly-1.0.0-hadoop2.2.0.jar:/opt/spark/lib/datanucleus-core-3.2.2.jar:/opt/spark/lib/datanucleus-api-jdo-3.2.1.jar:/opt/spark/lib/datanucleus-rdbms-3.2.1.jar" "-XX:MaxPermSize=128m" "-Xms512M" "-Xmx512M" "org.apache.spark.executor.CoarseGrainedExecutorBackend" "akka.tcp://spark@10.0.2.15:58552/user/CoarseGrainedScheduler" "2" "10.0.2.15" "1" "akka.tcp://sparkWorker@10.0.2.15:51073/user/Worker" "app-20140905150011-0014"
========================================

log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
14/09/05 15:00:17 INFO SparkHadoopUtil: Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
14/09/05 15:00:17 INFO SecurityManager: Changing view acls to: spark,paberline
14/09/05 15:00:17 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(spark, paberline)
14/09/05 15:00:19 INFO Slf4jLogger: Slf4jLogger started
14/09/05 15:00:19 INFO Remoting: Starting remoting
14/09/05 15:00:20 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutor@10.0.2.15:55176]
14/09/05 15:00:20 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkExecutor@10.0.2.15:55176]
14/09/05 15:00:20 INFO CoarseGrainedExecutorBackend: Connecting to driver: akka.tcp://spark@10.0.2.15:58552/user/CoarseGrainedScheduler
14/09/05 15:00:20 INFO WorkerWatcher: Connecting to worker akka.tcp://sparkWorker@10.0.2.15:51073/user/Worker
14/09/05 15:00:20 INFO WorkerWatcher: Successfully connected to akka.tcp://sparkWorker@10.0.2.15:51073/user/Worker
14/09/05 15:00:21 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
14/09/05 15:00:21 INFO SecurityManager: Changing view acls to: spark,paberline
14/09/05 15:00:21 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(spark, paberline)
14/09/05 15:00:21 INFO Slf4jLogger: Slf4jLogger started
14/09/05 15:00:21 INFO Remoting: Starting remoting
14/09/05 15:00:22 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://spark@10.0.2.15:53097]
14/09/05 15:00:22 INFO Remoting: Remoting now listens on addresses: [akka.tcp://spark@10.0.2.15:53097]
14/09/05 15:00:22 INFO SparkEnv: Connecting to MapOutputTracker: akka.tcp://spark@10.0.2.15:58552/user/MapOutputTracker
14/09/05 15:00:22 INFO SparkEnv: Connecting to BlockManagerMaster: akka.tcp://spark@10.0.2.15:58552/user/BlockManagerMaster
14/09/05 15:00:22 INFO DiskBlockManager: Created local directory at /tmp/spark-local-20140905150022-f443
14/09/05 15:00:22 INFO MemoryStore: MemoryStore started with capacity 297.0 MB.
14/09/05 15:00:22 INFO ConnectionManager: Bound socket to port 37643 with id = ConnectionManagerId(10.0.2.15,37643)
14/09/05 15:00:22 INFO BlockManagerMaster: Trying to register BlockManager
14/09/05 15:00:22 INFO BlockManagerMaster: Registered BlockManager
14/09/05 15:00:22 INFO HttpFileServer: HTTP File server directory is /tmp/spark-5884cbbb-5608-4375-8408-e4e11405f836
14/09/05 15:00:22 INFO HttpServer: Starting HTTP Server
14/09/05 15:00:23 WARN Utils: Your hostname, localhost.localdomain resolves to a loopback address: 127.0.0.1; using 10.0.2.15 instead (on interface eth3)
14/09/05 15:00:23 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
14/09/05 15:00:24 INFO CoarseGrainedExecutorBackend: Got assigned task 2
14/09/05 15:00:24 INFO Executor: Running task ID 2
14/09/05 15:00:24 INFO Executor: Fetching http://10.0.2.15:46512/jars/simpleapp_2.10-1.0.jar with timestamp 1409925610776
14/09/05 15:00:24 INFO Utils: Fetching http://10.0.2.15:46512/jars/simpleapp_2.10-1.0.jar to /tmp/fetchFileTemp6987419908001191420.tmp
14/09/05 15:00:24 INFO Executor: Adding file:/opt/spark-1.0.0-bin-hadoop2/work/app-20140905150011-0014/2/./simpleapp_2.10-1.0.jar to class loader
14/09/05 15:00:24 ERROR Executor: Exception in task ID 2
java.lang.ClassNotFoundException: ClassToRoundTrip
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:270)
    at java.io.ObjectInputStream.resolveClass(ObjectInputStream.java:625)
    at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
    at java.io.ObjectInputStream.readClass(ObjectInputStream.java:1483)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1333)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
    at scala.util.Marshal$.load(Marshal.scala:38)
    at RoundTripTester$.test(SimpleApp.scala:23)
    at SimpleApp$$anonfun$main$1.apply$mcVI$sp(SimpleApp.scala:41)
    at SimpleApp$$anonfun$main$1.apply(SimpleApp.scala:41)
    at SimpleApp$$anonfun$main$1.apply(SimpleApp.scala:41)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:703)
    at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:703)
    at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1080)
    at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1080)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:111)
    at org.apache.spark.scheduler.Task.run(Task.scala:51)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:187)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)
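Reading the trace bottom-up: the foreach closure in main (SimpleApp.scala:41) calls RoundTripTester.test (SimpleApp.scala:23), which calls scala.util.Marshal.load, which deserializes with a plain java.io.ObjectInputStream. A hypothetical reconstruction of that round trip (only the names that appear in the trace are real; everything else, including the field and the master URL, is an assumption):

    import scala.util.Marshal
    import org.apache.spark.SparkContext

    // Bare, package-less class name, matching the exception message.
    // Case classes are serializable by default.
    case class ClassToRoundTrip(id: Int)

    object RoundTripTester {
      def test(id: Int): ClassToRoundTrip = {
        val bytes = Marshal.dump(ClassToRoundTrip(id))  // serialize
        Marshal.load[ClassToRoundTrip](bytes)           // SimpleApp.scala:23 -- CNFE is thrown here
      }
    }

    object SimpleApp {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext("spark://10.0.2.15:7077", "Simple App")
        sc.parallelize(1 to 10).foreach(i => RoundTripTester.test(i))  // SimpleApp.scala:41
      }
    }

The deserializing side resolves classes via ObjectInputStream.resolveClass, which per the trace falls through to Class.forName on sun.misc.Launcher$AppClassLoader. That loader only sees the "-cp" entries in the executor command above; simpleapp_2.10-1.0.jar was added to a different class loader (the "Adding file:... to class loader" line), so the lookup fails. The same exception then repeats on the next task: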
14/09/05 15:00:24 INFO CoarseGrainedExecutorBackend: Got assigned task 4
14/09/05 15:00:24 INFO Executor: Running task ID 4
14/09/05 15:00:24 ERROR Executor: Exception in task ID 4
java.lang.ClassNotFoundException: ClassToRoundTrip
    [stack trace identical to the one for task 2 above]
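Both tasks fail identically, so this is deterministic class visibility, not a flaky jar fetch. One workaround is to deserialize through the task thread's context class loader, which is the loader the executor pointed at the fetched jar; a minimal sketch (ContextLoad is a hypothetical helper, not part of Spark or of the original SimpleApp):

    import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectStreamClass}

    object ContextLoad {
      // Like scala.util.Marshal.load, but resolves classes through the
      // current thread's context class loader instead of the loader of
      // the calling code, so classes in dynamically added jars are found.
      def load[A](bytes: Array[Byte]): A = {
        val loader = Thread.currentThread.getContextClassLoader
        val in = new ObjectInputStream(new ByteArrayInputStream(bytes)) {
          override def resolveClass(desc: ObjectStreamClass): Class[_] =
            Class.forName(desc.getName, false, loader)
        }
        try in.readObject().asInstanceOf[A] finally in.close()
      }
    }

Swapping the Marshal.load call for something like ContextLoad.load[ClassToRoundTrip](bytes) inside the closure should let the executor resolve the class; alternatively, putting the application jar on every executor's classpath up front, so the AppClassLoader itself can see it, avoids the problem without code changes.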