Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- 2014-11-07 17:10:23,863 WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
- 2014-11-07 17:10:24,815 INFO com.sirsidynix.Transform - spark.akka.frameSize=500
- spark.akka.threads=6
- spark.app.name=1d665ef4-com.sirsidynix.Transform
- spark.cores.max=1
- spark.default.parallelism=32
- spark.driver.host=1.1.1.1
- spark.driver.port=47200
- spark.executor.memory=1G
- spark.executor.uri=
- spark.fileserver.uri=http://1.1.1.1:58557
- spark.httpBroadcast.uri=http://1.1.1.1:39980
- spark.master=spark://1.1.1.1:7077
- spark.tachyonStore.folderName=spark-15470493-b67b-4a41-a46e-65e74d2277c6
- spark.ui.port=0
- 2014-11-07 17:10:24,947 INFO com.sirsidynix.Transform - I am doing symphony work!
- 2014-11-07 17:10:34,128 INFO com.sirsidynix.Transform - spark.akka.frameSize=500
- spark.akka.threads=6
- spark.app.name=e9a478b0-com.sirsidynix.Transform
- spark.cores.max=1
- spark.default.parallelism=32
- spark.driver.host=1.1.1.1
- spark.driver.port=59928
- spark.executor.memory=1G
- spark.executor.uri=
- spark.fileserver.uri=http://1.1.1.1:37663
- spark.master=spark://1.1.1.1:7077
- spark.tachyonStore.folderName=spark-67075c0b-7284-42a1-8d94-6e92dd920be8
- spark.ui.port=0
- 2014-11-07 17:10:34,139 INFO com.sirsidynix.Transform - I am doing symphony work!
- 2014-11-07 17:10:37,527 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost executor 0 on 1.1.1.1: remote Akka client disassociated
- 2014-11-07 17:10:37,569 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 0 (task 0.0:0)
- 2014-11-07 17:10:40,927 INFO com.sirsidynix.sym.transformers.SymCheckout -
- Running SymCheckout...
- 2014-11-07 17:10:40,968 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost executor 1 on 1.1.1.1: remote Akka client disassociated
- 2014-11-07 17:10:40,985 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 1 (task 0.0:0)
- 2014-11-07 17:10:44,444 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost executor 2 on 1.1.1.1: remote Akka client disassociated
- 2014-11-07 17:10:44,444 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 2 (task 0.0:0)
- 2014-11-07 17:10:47,898 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost executor 3 on 1.1.1.1: remote Akka client disassociated
- 2014-11-07 17:10:47,898 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 3 (task 0.0:0)
- 2014-11-07 17:10:47,901 ERROR org.apache.spark.scheduler.TaskSetManager - Task 0.0:0 failed 4 times; aborting job
- 2014-11-07 17:10:47,921 ERROR com.sirsidynix.sym.transformers.SymPolicies - Problem running SymPolicies: Job aborted due to stage failure: Task 0.0:0 failed 4 times, most recent failure: TID 3 on host 1.1.1.1 failed for unknown reason
- Driver stacktrace:
- org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1033)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1017)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1015)
- scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
- scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
- org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1015)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
- scala.Option.foreach(Option.scala:236)
- org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:633)
- org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1207)
- akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
- akka.actor.ActorCell.invoke(ActorCell.scala:456)
- akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
- akka.dispatch.Mailbox.run(Mailbox.scala:219)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:50,941 INFO com.sirsidynix.util.ImpLoader$ - Drop and recreate Impala table: checkout with newly transformed data in hdfs://1.1.1.1:8020/jarvis/parquet/67fc0cb1-f220-482a-8e91-c6d669aae84d/checkout Impala IP = 1.1.1.1 dbName = abcdefg
- 2014-11-07 17:10:51,236 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost an executor 4 (already removed): remote Akka client disassociated
- 2014-11-07 17:10:51,984 INFO com.sirsidynix.util.ImpLoader$ - [stripped terminal escape sequence ESC[?1034h — no message content]
- 2014-11-07 17:10:52,697 INFO com.sirsidynix.util.ImpLoader$ - [stripped terminal escape sequence ESC[?1034h — no message content]
- 2014-11-07 17:10:53,179 ERROR com.sirsidynix.sym.transformers.SymCheckout - Problem running checkout: Nonzero exit value: 1
- scala.sys.package$.error(package.scala:27)
- scala.sys.process.ProcessBuilderImpl$AbstractBuilder.slurp(ProcessBuilderImpl.scala:131)
- scala.sys.process.ProcessBuilderImpl$AbstractBuilder.$bang$bang(ProcessBuilderImpl.scala:101)
- com.sirsidynix.util.ImpLoader$.loadImpParquet(ImpLoader.scala:16)
- com.sirsidynix.sym.transformers.SymCheckout.loadImpalaParquet(SymCheckout.scala:74)
- com.sirsidynix.sym.transformers.SymCheckout.doRun(SymCheckout.scala:57)
- com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:26)
- com.sirsidynix.Transform.runJob(Transform.scala:49)
- spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:53,183 INFO com.sirsidynix.sym.transformers.SymUser -
- Running SymUser...
- 2014-11-07 17:10:54,844 ERROR org.apache.spark.scheduler.TaskSchedulerImpl - Lost an executor 5 (already removed): remote Akka client disassociated
- 2014-11-07 17:10:56,982 INFO com.sirsidynix.sym.transformers.SymCheckout -
- Running SymCheckout...
- 2014-11-07 17:10:57,014 ERROR com.sirsidynix.sym.transformers.SymCheckout - Problem running checkout: null
- com.sirsidynix.sym.transformers.SymCheckout.doRun(SymCheckout.scala:24)
- com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:26)
- com.sirsidynix.Transform.runJob(Transform.scala:49)
- spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:57,026 INFO com.sirsidynix.sym.transformers.SymUser -
- Running SymUser...
- 2014-11-07 17:10:57,049 ERROR com.sirsidynix.sym.transformers.SymUser - Problem running SymUsers: null
- com.sirsidynix.sym.transformers.SymUser.doRun(SymUser.scala:26)
- com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:28)
- com.sirsidynix.Transform.runJob(Transform.scala:49)
- spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:57,063 INFO com.sirsidynix.sym.transformers.SymUserAddress -
- Running SymUserAddress...
- 2014-11-07 17:10:57,074 ERROR com.sirsidynix.sym.transformers.SymUserAddress - Problem
- java.lang.NullPointerException
- at com.sirsidynix.sym.transformers.SymUserAddress.doRun(SymUserAddress.scala:25)
- at com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:30)
- at com.sirsidynix.Transform.runJob(Transform.scala:49)
- at spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:57,077 ERROR com.sirsidynix.sym.transformers.SymUserAddress - Problem running SymUserAddress: null
- com.sirsidynix.sym.transformers.SymUserAddress.doRun(SymUserAddress.scala:25)
- com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:30)
- com.sirsidynix.Transform.runJob(Transform.scala:49)
- spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:10:57,094 WARN spark.jobserver.RddManagerActor - Shutting down spark.jobserver.RddManagerActor
- 2014-11-07 17:10:57,094 WARN spark.jobserver.JobStatusActor - Shutting down spark.jobserver.JobStatusActor
- 2014-11-07 17:11:01,992 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 2 (task 36.0:0)
- 2014-11-07 17:11:02,001 WARN org.apache.spark.scheduler.TaskSetManager - Loss was due to java.net.ConnectException
- java.net.ConnectException: Connection refused
- at java.net.PlainSocketImpl.socketConnect(Native Method)
- at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
- at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
- at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
- at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
- at java.net.Socket.connect(Socket.java:579)
- at java.net.Socket.connect(Socket.java:528)
- at sun.net.NetworkClient.doConnect(NetworkClient.java:180)
- at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
- at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
- at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
- at sun.net.www.http.HttpClient.New(HttpClient.java:308)
- at sun.net.www.http.HttpClient.New(HttpClient.java:326)
- at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
- at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
- at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
- at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1300)
- at org.apache.spark.broadcast.HttpBroadcast$.read(HttpBroadcast.scala:196)
- at org.apache.spark.broadcast.HttpBroadcast.readObject(HttpBroadcast.scala:89)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:606)
- at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:606)
- at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- at org.apache.spark.scheduler.ShuffleMapTask$.deserializeInfo(ShuffleMapTask.scala:63)
- at org.apache.spark.scheduler.ShuffleMapTask.readExternal(ShuffleMapTask.scala:135)
- at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1837)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
- at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:169)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
- 2014-11-07 17:11:02,018 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 3 (task 37.0:0)
- 2014-11-07 17:11:02,023 WARN org.apache.spark.scheduler.TaskSetManager - Loss was due to java.net.ConnectException
- java.net.ConnectException: Connection refused
- at java.net.PlainSocketImpl.socketConnect(Native Method)
- at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
- at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
- at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
- at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
- at java.net.Socket.connect(Socket.java:579)
- at java.net.Socket.connect(Socket.java:528)
- at sun.net.NetworkClient.doConnect(NetworkClient.java:180)
- at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
- at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
- at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
- at sun.net.www.http.HttpClient.New(HttpClient.java:308)
- at sun.net.www.http.HttpClient.New(HttpClient.java:326)
- at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
- at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
- at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
- at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1300)
- at org.apache.spark.broadcast.HttpBroadcast$.read(HttpBroadcast.scala:196)
- at org.apache.spark.broadcast.HttpBroadcast.readObject(HttpBroadcast.scala:89)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:606)
- at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:606)
- at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:606)
- at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- at org.apache.spark.scheduler.ShuffleMapTask$.deserializeInfo(ShuffleMapTask.scala:63)
- at org.apache.spark.scheduler.ShuffleMapTask.readExternal(ShuffleMapTask.scala:135)
- at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1837)
- at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
- at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
- at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:169)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
- 2014-11-07 17:11:02,034 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 4 (task 36.0:0)
- 2014-11-07 17:11:02,055 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 5 (task 37.0:0)
- 2014-11-07 17:11:02,078 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 6 (task 36.0:0)
- 2014-11-07 17:11:02,099 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 7 (task 37.0:0)
- 2014-11-07 17:11:02,123 WARN org.apache.spark.scheduler.TaskSetManager - Lost TID 8 (task 36.0:0)
- 2014-11-07 17:11:02,123 ERROR org.apache.spark.scheduler.TaskSetManager - Task 36.0:0 failed 4 times; aborting job
- 2014-11-07 17:11:02,130 ERROR com.sirsidynix.sym.transformers.SymUser - Problem running SymUsers: Job aborted due to stage failure: Task 36.0:0 failed 4 times, most recent failure: Exception failure in TID 8 on host 1.1.1.1: java.net.ConnectException: Connection refused
- java.net.PlainSocketImpl.socketConnect(Native Method)
- java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
- java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
- java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
- java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
- java.net.Socket.connect(Socket.java:579)
- java.net.Socket.connect(Socket.java:528)
- sun.net.NetworkClient.doConnect(NetworkClient.java:180)
- sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
- sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
- sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
- sun.net.www.http.HttpClient.New(HttpClient.java:308)
- sun.net.www.http.HttpClient.New(HttpClient.java:326)
- sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
- sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
- sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
- sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1300)
- org.apache.spark.broadcast.HttpBroadcast$.read(HttpBroadcast.scala:196)
- org.apache.spark.broadcast.HttpBroadcast.readObject(HttpBroadcast.scala:89)
- sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- java.lang.reflect.Method.invoke(Method.java:606)
- java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- scala.collection.immutable.$colon$colon.readObject(List.scala:362)
- sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
- sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- java.lang.reflect.Method.invoke(Method.java:606)
- java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
- java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
- java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- org.apache.spark.scheduler.ShuffleMapTask$.deserializeInfo(ShuffleMapTask.scala:63)
- org.apache.spark.scheduler.ShuffleMapTask.readExternal(ShuffleMapTask.scala:135)
- java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1837)
- java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1796)
- java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
- java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
- org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
- org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
- org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:169)
- java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- java.lang.Thread.run(Thread.java:745)
- Driver stacktrace:
- org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1033)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1017)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1015)
- scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
- scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
- org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1015)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
- org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
- scala.Option.foreach(Option.scala:236)
- org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:633)
- org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1207)
- akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
- akka.actor.ActorCell.invoke(ActorCell.scala:456)
- akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
- akka.dispatch.Mailbox.run(Mailbox.scala:219)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:11:02,134 INFO com.sirsidynix.sym.transformers.SymUserAddress -
- Running SymUserAddress...
- 2014-11-07 17:11:02,176 ERROR com.sirsidynix.sym.transformers.SymUserAddress - Problem
- java.lang.NullPointerException
- at org.apache.spark.broadcast.HttpBroadcast$.write(HttpBroadcast.scala:167)
- at org.apache.spark.broadcast.HttpBroadcast.<init>(HttpBroadcast.scala:57)
- at org.apache.spark.broadcast.HttpBroadcastFactory.newBroadcast(HttpBroadcastFactory.scala:35)
- at org.apache.spark.broadcast.HttpBroadcastFactory.newBroadcast(HttpBroadcastFactory.scala:29)
- at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:62)
- at org.apache.spark.SparkContext.broadcast(SparkContext.scala:776)
- at org.apache.spark.rdd.NewHadoopRDD.<init>(NewHadoopRDD.scala:72)
- at org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:626)
- at com.sirsidynix.util.BaseTransformer$class.readParquetFile(BaseTransformer.scala:23)
- at com.sirsidynix.sym.transformers.SymUserAddress.readParquetFile(SymUserAddress.scala:16)
- at com.sirsidynix.sym.transformers.SymUserAddress.buildUserAddress(SymUserAddress.scala:114)
- at com.sirsidynix.sym.transformers.SymUserAddress.doRun(SymUserAddress.scala:26)
- at com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:30)
- at com.sirsidynix.Transform.runJob(Transform.scala:49)
- at spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:11:02,188 ERROR com.sirsidynix.sym.transformers.SymUserAddress - Problem running SymUserAddress: null
- org.apache.spark.broadcast.HttpBroadcast$.write(HttpBroadcast.scala:167)
- org.apache.spark.broadcast.HttpBroadcast.<init>(HttpBroadcast.scala:57)
- org.apache.spark.broadcast.HttpBroadcastFactory.newBroadcast(HttpBroadcastFactory.scala:35)
- org.apache.spark.broadcast.HttpBroadcastFactory.newBroadcast(HttpBroadcastFactory.scala:29)
- org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:62)
- org.apache.spark.SparkContext.broadcast(SparkContext.scala:776)
- org.apache.spark.rdd.NewHadoopRDD.<init>(NewHadoopRDD.scala:72)
- org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:626)
- com.sirsidynix.util.BaseTransformer$class.readParquetFile(BaseTransformer.scala:23)
- com.sirsidynix.sym.transformers.SymUserAddress.readParquetFile(SymUserAddress.scala:16)
- com.sirsidynix.sym.transformers.SymUserAddress.buildUserAddress(SymUserAddress.scala:114)
- com.sirsidynix.sym.transformers.SymUserAddress.doRun(SymUserAddress.scala:26)
- com.sirsidynix.sym.SymTransform$.run(SymTransform.scala:30)
- com.sirsidynix.Transform.runJob(Transform.scala:49)
- spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:218)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
- scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
- akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
- akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
- scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
- scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
- scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
- scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
- 2014-11-07 17:11:02,191 WARN spark.jobserver.RddManagerActor - Shutting down spark.jobserver.RddManagerActor
- 2014-11-07 17:11:02,191 WARN spark.jobserver.JobStatusActor - Shutting down spark.jobserver.JobStatusActor
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement