- {"Event":"SparkListenerLogStart","Spark Version":"1.6.1"}
- {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"driver","Host":"10.228.168.238","Port":39797},"Maximum Memory":569376768,"Timestamp":1463768841483}
- {"Event":"SparkListenerEnvironmentUpdate","JVM Information":{"Java Home":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre","Java Version":"1.8.0_72 (Oracle Corporation)","Scala Version":"version 2.10.5"},"Spark Properties":{"spark.serializer":"org.apache.spark.serializer.KryoSerializer","spark.executor.extraJavaOptions":"-XX:+UseG1GC","spark.driver.host":"10.228.168.238","spark.history.fs.logDirectory":"hdfs:///spark-events","spark.eventLog.enabled":"true","spark.ui.port":"4040","spark.driver.port":"50079","spark.shuffle.service.enabled":"true","spark.driver.extraLibraryPath":"/apollo/env/A9SL-SLDSP/hadoop/lib/native","spark.repl.class.uri":"http://10.228.168.238:33541","spark.jars":"","spark.app.name":"Spark shell","spark.scheduler.mode":"FIFO","spark.logConf":"true","spark.history.fs.cleaner.enabled":"true","spark.executor.id":"driver","spark.driver.extraJavaOptions":"-Duser=weifengc -XX:+UseG1GC","spark.submit.deployMode":"client","spark.master":"yarn-client","spark.ui.filters":"org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter","spark.driver.extraClassPath":"/apollo/env/A9SL-SLDSP/spark/lib/mysql-connector-java.jar","spark.eventLog.dir":"hdfs:///spark-events","spark.dynamicAllocation.enabled":"true","spark.eventLog.compress":"false","spark.driver.appUIAddress":"http://10.228.168.238:4040","spark.externalBlockStore.folderName":"spark-92746c7a-c790-4d4e-b634-1d426a7f7fd1","spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_HOSTS":"a9sl-sldsp-testcluster-master-6001.iad6.amazon.com","spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_URI_BASES":"http://a9sl-sldsp-testcluster-master-6001.iad6.amazon.com:8088/proxy/application_1463692924309_0002","spark.app.id":"application_1463692924309_0002"},"System Properties":{"java.io.tmpdir":"/tmp","line.separator":"\n","path.separator":":","sun.management.compiler":"HotSpot 64-Bit Tiered Compilers","SPARK_SUBMIT":"true","sun.cpu.endian":"little","java.specification.version":"1.8","java.vm.specification.name":"Java Virtual Machine Specification","java.vendor":"Oracle Corporation","java.vm.specification.version":"1.8","user.home":"/home/weifengc","file.encoding.pkg":"sun.io","sun.nio.ch.bugLevel":"","sun.arch.data.model":"64","sun.boot.library.path":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/amd64","user.dir":"/home/weifengc","java.library.path":"/apollo/env/A9SL-SLDSP/hadoop/lib/native:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib","sun.cpu.isalist":"","os.arch":"amd64","java.vm.version":"25.72-b15","java.endorsed.dirs":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/endorsed","java.runtime.version":"1.8.0_72-b15","java.vm.info":"mixed mode","java.ext.dirs":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/ext:/usr/java/packages/lib/ext","java.runtime.name":"Java(TM) SE Runtime Environment","file.separator":"/","java.class.version":"52.0","SPARK_YARN_MODE":"true","scala.usejavacp":"true","java.specification.name":"Java Platform API 
Specification","sun.boot.class.path":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/resources.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/rt.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/sunrsasign.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/jsse.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/jce.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/charsets.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/lib/jfr.jar:/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre/classes","file.encoding":"UTF-8","user.timezone":"UTC","java.specification.vendor":"Oracle Corporation","sun.java.launcher":"SUN_STANDARD","os.version":"3.2.45-0.6.wd.799.47.315.metal1.x86_64","sun.os.patch.level":"unknown","java.vm.specification.vendor":"Oracle Corporation","user.country":"US","sun.jnu.encoding":"UTF-8","user.language":"en","java.vendor.url":"http://java.oracle.com/","java.awt.printerjob":"sun.print.PSPrinterJob","java.awt.graphicsenv":"sun.awt.X11GraphicsEnvironment","awt.toolkit":"sun.awt.X11.XToolkit","os.name":"Linux","java.vm.vendor":"Oracle Corporation","java.vendor.url.bug":"http://bugreport.sun.com/bugreport/","user.name":"weifengc","user":"weifengc","java.vm.name":"Java HotSpot(TM) 64-Bit Server VM","sun.java.command":"org.apache.spark.deploy.SparkSubmit --master yarn --deploy-mode client --conf spark.driver.extraLibraryPath=/apollo/env/A9SL-SLDSP/hadoop/lib/native --conf spark.driver.extraClassPath=/apollo/env/A9SL-SLDSP/spark/lib/mysql-connector-java.jar --conf spark.ui.port=4040 --conf spark.driver.extraJavaOptions=-Duser=weifengc -XX:+UseG1GC --class org.apache.spark.repl.Main --name Spark shell spark-shell","java.home":"/local/apollo/package/local_1/Linux-2.6c2.5-x86_64/JDK8/JDK8-7314.0-0/jdk1.8/jre","java.version":"1.8.0_72","sun.io.unicode.encoding":"UnicodeLittle"},"Classpath Entries":{"/apollo/env/A9SL-SLDSP/spark/lib/spark-assembly-1.6.1-hadoop2.6.0.jar":"System Classpath","/apollo/env/A9SL-SLDSP/spark/lib/datanucleus-api-jdo-3.2.6.jar":"System Classpath","/apollo/env/A9SL-SLDSP/spark/lib/datanucleus-core-3.2.10.jar":"System Classpath","/apollo/env/A9SL-SLDSP/spark/lib/mysql-connector-java.jar":"System Classpath","/apollo/env/A9SL-SLDSP/spark/lib/datanucleus-rdbms-3.2.9.jar":"System Classpath","/apollo/env/A9SL-SLDSP/sldsp-conf/hadoop/":"System Classpath","/apollo/env/A9SL-SLDSP/sldsp-conf/spark/":"System Classpath"}}
- {"Event":"SparkListenerApplicationStart","App Name":"Spark shell","App ID":"application_1463692924309_0002","Timestamp":1463768834665,"User":"weifengc"}
- {"Event":"SparkListenerJobStart","Job ID":0,"Submission Time":1463769046465,"Stage Infos":[{"Stage ID":0,"Stage Attempt ID":0,"Stage Name":"count at <console>:30","Number of Tasks":2,"RDD Info":[{"RDD ID":1,"Name":"/spark-events/application_1463681113470_0006","Scope":"{\"id\":\"0\",\"name\":\"textFile\"}","Callsite":"textFile at <console>:27","Parent IDs":[0],"Storage Level":{"Use Disk":false,"Use Memory":false,"Use ExternalBlockStore":false,"Deserialized":false,"Replication":1},"Number of Partitions":2,"Number of Cached Partitions":0,"Memory Size":0,"ExternalBlockStore Size":0,"Disk Size":0},{"RDD ID":0,"Name":"/spark-events/application_1463681113470_0006","Scope":"{\"id\":\"0\",\"name\":\"textFile\"}","Callsite":"textFile at <console>:27","Parent IDs":[],"Storage Level":{"Use Disk":false,"Use Memory":false,"Use ExternalBlockStore":false,"Deserialized":false,"Replication":1},"Number of Partitions":2,"Number of Cached Partitions":0,"Memory Size":0,"ExternalBlockStore Size":0,"Disk Size":0}],"Parent IDs":[],"Details":"org.apache.spark.rdd.RDD.count(RDD.scala:1157)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:37)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:39)\n$line19.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:41)\n$line19.$read$$iwC$$iwC$$iwC.<init>(<console>:43)\n$line19.$read$$iwC$$iwC.<init>(<console>:45)\n$line19.$read$$iwC.<init>(<console>:47)\n$line19.$read.<init>(<console>:49)\n$line19.$read$.<init>(<console>:53)\n$line19.$read$.<clinit>(<console>)\n$line19.$eval$.<init>(<console>:7)\n$line19.$eval$.<clinit>(<console>)\n$line19.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:498)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)","Accumulables":[]}],"Stage IDs":[0],"Properties":{}}
- {"Event":"SparkListenerStageSubmitted","Stage Info":{"Stage ID":0,"Stage Attempt ID":0,"Stage Name":"count at <console>:30","Number of Tasks":2,"RDD Info":[{"RDD ID":1,"Name":"/spark-events/application_1463681113470_0006","Scope":"{\"id\":\"0\",\"name\":\"textFile\"}","Callsite":"textFile at <console>:27","Parent IDs":[0],"Storage Level":{"Use Disk":false,"Use Memory":false,"Use ExternalBlockStore":false,"Deserialized":false,"Replication":1},"Number of Partitions":2,"Number of Cached Partitions":0,"Memory Size":0,"ExternalBlockStore Size":0,"Disk Size":0},{"RDD ID":0,"Name":"/spark-events/application_1463681113470_0006","Scope":"{\"id\":\"0\",\"name\":\"textFile\"}","Callsite":"textFile at <console>:27","Parent IDs":[],"Storage Level":{"Use Disk":false,"Use Memory":false,"Use ExternalBlockStore":false,"Deserialized":false,"Replication":1},"Number of Partitions":2,"Number of Cached Partitions":0,"Memory Size":0,"ExternalBlockStore Size":0,"Disk Size":0}],"Parent IDs":[],"Details":"org.apache.spark.rdd.RDD.count(RDD.scala:1157)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:37)\n$line19.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:39)\n$line19.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:41)\n$line19.$read$$iwC$$iwC$$iwC.<init>(<console>:43)\n$line19.$read$$iwC$$iwC.<init>(<console>:45)\n$line19.$read$$iwC.<init>(<console>:47)\n$line19.$read.<init>(<console>:49)\n$line19.$read$.<init>(<console>:53)\n$line19.$read$.<clinit>(<console>)\n$line19.$eval$.<init>(<console>:7)\n$line19.$eval$.<clinit>(<console>)\n$line19.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:498)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)","Accumulables":[]},"Properties":{}}
- {"Event":"SparkListenerExecutorAdded","Timestamp":1463769050973,"Executor ID":"1","Executor Info":{"Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Total Cores":1,"Log Urls":{"stdout":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000002/weifengc/stdout?start=-4096","stderr":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000002/weifengc/stderr?start=-4096"}}}
- {"Event":"SparkListenerTaskStart","Stage ID":0,"Stage Attempt ID":0,"Task Info":{"Task ID":0,"Index":0,"Attempt":0,"Launch Time":1463769050978,"Executor ID":"1","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":0,"Failed":false,"Accumulables":[]}}
- {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"1","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":53216},"Maximum Memory":569376768,"Timestamp":1463769051011}
- {"Event":"SparkListenerExecutorAdded","Timestamp":1463769052321,"Executor ID":"2","Executor Info":{"Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Total Cores":1,"Log Urls":{"stdout":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000003/weifengc/stdout?start=-4096","stderr":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000003/weifengc/stderr?start=-4096"}}}
- {"Event":"SparkListenerTaskStart","Stage ID":0,"Stage Attempt ID":0,"Task Info":{"Task ID":1,"Index":1,"Attempt":0,"Launch Time":1463769052321,"Executor ID":"2","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":0,"Failed":false,"Accumulables":[]}}
- {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"2","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":35636},"Maximum Memory":569376768,"Timestamp":1463769052348}
- {"Event":"SparkListenerBlockManagerRemoved","Block Manager ID":{"Executor ID":"1","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":53216},"Timestamp":1463769061402}
- {"Event":"SparkListenerExecutorRemoved","Timestamp":1463769061637,"Executor ID":"1","Removed Reason":"Container marked as failed: container_1463692924309_0002_01_000002 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000002\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"}
- {"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ResultTask","Task End Reason":{"Reason":"ExecutorLostFailure","Executor ID":"1","Exit Caused By App":true,"Loss Reason":"Container marked as failed: container_1463692924309_0002_01_000002 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000002\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"},"Task Info":{"Task ID":0,"Index":0,"Attempt":0,"Launch Time":1463769050978,"Executor ID":"1","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1463769061634,"Failed":true,"Accumulables":[]}}
- {"Event":"SparkListenerBlockManagerRemoved","Block Manager ID":{"Executor ID":"2","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":35636},"Timestamp":1463769062717}
- {"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ResultTask","Task End Reason":{"Reason":"ExecutorLostFailure","Executor ID":"2","Exit Caused By App":true,"Loss Reason":"Container marked as failed: container_1463692924309_0002_01_000003 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000003\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"},"Task Info":{"Task ID":1,"Index":1,"Attempt":0,"Launch Time":1463769052321,"Executor ID":"2","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1463769064130,"Failed":true,"Accumulables":[]}}
- {"Event":"SparkListenerExecutorRemoved","Timestamp":1463769064130,"Executor ID":"2","Removed Reason":"Container marked as failed: container_1463692924309_0002_01_000003 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000003\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"}
- {"Event":"SparkListenerExecutorAdded","Timestamp":1463769066111,"Executor ID":"3","Executor Info":{"Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Total Cores":1,"Log Urls":{"stdout":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000004/weifengc/stdout?start=-4096","stderr":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000004/weifengc/stderr?start=-4096"}}}
- {"Event":"SparkListenerTaskStart","Stage ID":0,"Stage Attempt ID":0,"Task Info":{"Task ID":2,"Index":1,"Attempt":1,"Launch Time":1463769066112,"Executor ID":"3","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":0,"Failed":false,"Accumulables":[]}}
- {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"3","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":43706},"Maximum Memory":569376768,"Timestamp":1463769066139}
- {"Event":"SparkListenerExecutorAdded","Timestamp":1463769069733,"Executor ID":"4","Executor Info":{"Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Total Cores":1,"Log Urls":{"stdout":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000005/weifengc/stdout?start=-4096","stderr":"http://a9-sldsp-testcluster-worker-6001.iad6.amazon.com:8088/node/containerlogs/container_1463692924309_0002_01_000005/weifengc/stderr?start=-4096"}}}
- {"Event":"SparkListenerTaskStart","Stage ID":0,"Stage Attempt ID":0,"Task Info":{"Task ID":3,"Index":0,"Attempt":1,"Launch Time":1463769069734,"Executor ID":"4","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":0,"Failed":false,"Accumulables":[]}}
- {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor ID":"4","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":33790},"Maximum Memory":569376768,"Timestamp":1463769069783}
- {"Event":"SparkListenerBlockManagerRemoved","Block Manager ID":{"Executor ID":"3","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":43706},"Timestamp":1463769076508}
- {"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ResultTask","Task End Reason":{"Reason":"ExecutorLostFailure","Executor ID":"3","Exit Caused By App":true,"Loss Reason":"Container marked as failed: container_1463692924309_0002_01_000004 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000004\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"},"Task Info":{"Task ID":2,"Index":1,"Attempt":1,"Launch Time":1463769066112,"Executor ID":"3","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1463769076713,"Failed":true,"Accumulables":[]}}
- {"Event":"SparkListenerExecutorRemoved","Timestamp":1463769076714,"Executor ID":"3","Removed Reason":"Container marked as failed: container_1463692924309_0002_01_000004 on host: a9-sldsp-testcluster-worker-6001.iad6.amazon.com. Exit status: 1. Diagnostics: Exception from container-launch.\nContainer id: container_1463692924309_0002_01_000004\nExit code: 1\nStack trace: ExitCodeException exitCode=1: \n\tat org.apache.hadoop.util.Shell.runCommand(Shell.java:538)\n\tat org.apache.hadoop.util.Shell.run(Shell.java:455)\n\tat org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)\n\tat org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:211)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:302)\n\tat org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:82)\n\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)\n\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)\n\tat java.lang.Thread.run(Thread.java:745)\n\n\nContainer exited with a non-zero exit code 1\n"}
- {"Event":"SparkListenerBlockManagerRemoved","Block Manager ID":{"Executor ID":"4","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Port":33790},"Timestamp":1463769080151}
- {"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ResultTask","Task End Reason":{"Reason":"ExecutorLostFailure","Executor ID":"4","Exit Caused By App":true,"Loss Reason":"Slave lost"},"Task Info":{"Task ID":3,"Index":0,"Attempt":1,"Launch Time":1463769069734,"Executor ID":"4","Host":"a9-sldsp-testcluster-worker-6001.iad6.amazon.com","Locality":"NODE_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1463769080488,"Failed":true,"Accumulables":[]}}
- {"Event":"SparkListenerExecutorRemoved","Timestamp":1463769080488,"Executor ID":"4","Removed Reason":"Slave lost"}
- {"Event":"SparkListenerApplicationEnd","Timestamp":1463774068166}