Advertisement
Guest User

spark issue

a guest
Feb 16th, 2016
110
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 28.93 KB | None | 0 0
  1. [root@phdns01 spark]# su - spark
  2. [spark@phdns01 ~]$ cd /usr/hdp/2.3.4.0-3485/spark/
  3. [spark@phdns01 spark]$ ./bin/spark-submit --class org.apache.spark.examples.SparkPi --master yarn-client --num-executors 3 --driver-memory 512m --executor-memory 512m --executor-cores 1 lib/spark-examples*.jar 10
  4. 16/02/16 09:13:56 INFO SparkContext: Running Spark version 1.5.2
  5. 16/02/16 09:13:56 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
  6. 16/02/16 09:13:56 INFO SecurityManager: Changing view acls to: spark
  7. 16/02/16 09:13:56 INFO SecurityManager: Changing modify acls to: spark
  8. 16/02/16 09:13:56 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(spark); users with modify permissions: Set(spark)
  9. 16/02/16 09:13:57 INFO Slf4jLogger: Slf4jLogger started
  10. 16/02/16 09:13:57 INFO Remoting: Starting remoting
  11. 16/02/16 09:13:57 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@172.24.64.98:59589]
  12. 16/02/16 09:13:58 INFO Utils: Successfully started service 'sparkDriver' on port 59589.
  13. 16/02/16 09:13:58 INFO SparkEnv: Registering MapOutputTracker
  14. 16/02/16 09:13:58 INFO SparkEnv: Registering BlockManagerMaster
  15. 16/02/16 09:13:58 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-b66b0880-5c88-4fdb-b92e-7437bd6f57ff
  16. 16/02/16 09:13:58 INFO MemoryStore: MemoryStore started with capacity 265.1 MB
  17. 16/02/16 09:13:58 INFO HttpFileServer: HTTP File server directory is /tmp/spark-9320408c-0cd9-4719-9291-887d55d50a4e/httpd-91b13e2f-be9d-4a01-866e-4342d2b0bd97
  18. 16/02/16 09:13:58 INFO HttpServer: Starting HTTP Server
  19. 16/02/16 09:13:58 INFO Server: jetty-8.y.z-SNAPSHOT
  20. 16/02/16 09:13:58 INFO AbstractConnector: Started SocketConnector@0.0.0.0:57892
  21. 16/02/16 09:13:58 INFO Utils: Successfully started service 'HTTP file server' on port 57892.
  22. 16/02/16 09:13:58 INFO SparkEnv: Registering OutputCommitCoordinator
  23. 16/02/16 09:13:58 INFO Server: jetty-8.y.z-SNAPSHOT
  24. 16/02/16 09:13:58 WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
  25. java.net.BindException: Address already in use
  26. at sun.nio.ch.Net.bind0(Native Method)
  27. at sun.nio.ch.Net.bind(Net.java:433)
  28. at sun.nio.ch.Net.bind(Net.java:425)
  29. at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
  30. at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
  31. at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
  32. at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
  33. at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
  34. at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
  35. at org.spark-project.jetty.server.Server.doStart(Server.java:293)
  36. at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
  37. at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:237)
  38. at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:247)
  39. at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:247)
  40. at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
  41. at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
  42. at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
  43. at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
  44. at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
  45. at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
  46. at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
  47. at scala.Option.foreach(Option.scala:236)
  48. at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
  49. at org.apache.spark.examples.SparkPi$.main(SparkPi.scala:29)
  50. at org.apache.spark.examples.SparkPi.main(SparkPi.scala)
  51. at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  52. at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  53. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  54. at java.lang.reflect.Method.invoke(Method.java:497)
  55. at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
  56. at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
  57. at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
  58. at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
  59. at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
  60. 16/02/16 09:13:58 WARN AbstractLifeCycle: FAILED org.spark-project.jetty.server.Server@1d572e62: java.net.BindException: Address already in use
  61. java.net.BindException: Address already in use
  62. at sun.nio.ch.Net.bind0(Native Method)
  63. at sun.nio.ch.Net.bind(Net.java:433)
  64. at sun.nio.ch.Net.bind(Net.java:425)
  65. at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
  66. at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
  67. at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
  68. at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
  69. at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
  70. at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
  71. at org.spark-project.jetty.server.Server.doStart(Server.java:293)
  72. at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
  73. at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:237)
  74. at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:247)
  75. at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:247)
  76. at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
  77. at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
  78. at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
  79. at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
  80. at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
  81. at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
  82. at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
  83. at scala.Option.foreach(Option.scala:236)
  84. at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
  85. at org.apache.spark.examples.SparkPi$.main(SparkPi.scala:29)
  86. at org.apache.spark.examples.SparkPi.main(SparkPi.scala)
  87. at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  88. at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  89. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  90. at java.lang.reflect.Method.invoke(Method.java:497)
  91. at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
  92. at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
  93. at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
  94. at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
  95. at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
  96. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
  97. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
  98. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
  99. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
  100. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
  101. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
  102. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
  103. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
  104. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
  105. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
  106. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
  107. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
  108. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
  109. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
  110. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
  111. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
  112. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
  113. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
  114. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
  115. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
  116. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
  117. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
  118. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
  119. 16/02/16 09:13:58 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
  120. 16/02/16 09:13:58 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
  121. 16/02/16 09:13:58 INFO Server: jetty-8.y.z-SNAPSHOT
  122. 16/02/16 09:13:58 INFO AbstractConnector: Started SelectChannelConnector@0.0.0.0:4041
  123. 16/02/16 09:13:58 INFO Utils: Successfully started service 'SparkUI' on port 4041.
  124. 16/02/16 09:13:58 INFO SparkUI: Started SparkUI at http://172.24.64.98:4041
  125. 16/02/16 09:13:59 INFO SparkContext: Added JAR file:/usr/hdp/2.3.4.0-3485/spark/lib/spark-examples-1.5.2.2.3.4.0-3485-hadoop2.7.1.2.3.4.0-3485.jar at http://172.24.64.98:57892/jars/spark-examples-1.5.2.2.3.4.0-3485-hadoop2.7.1.2.3.4.0-3485.jar with timestamp 1455642839368
  126. 16/02/16 09:13:59 WARN MetricsSystem: Using default name DAGScheduler for source because spark.app.id is not set.
  127. spark.yarn.driver.memoryOverhead is set but does not apply in client mode.
  128. 16/02/16 09:13:59 INFO TimelineClientImpl: Timeline service address: http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  129. 16/02/16 09:14:00 INFO RMProxy: Connecting to ResourceManager at phdns02.cloud.hortonworks.com/172.24.64.101:8050
  130. 16/02/16 09:14:00 WARN DomainSocketFactory: The short-circuit local reads feature cannot be used because libhadoop cannot be loaded.
  131. 16/02/16 09:14:01 INFO Client: Requesting a new application from cluster with 1 NodeManagers
  132. 16/02/16 09:14:01 INFO Client: Verifying our application has not requested more than the maximum memory capability of the cluster (12288 MB per container)
  133. 16/02/16 09:14:01 INFO Client: Will allocate AM container, with 896 MB memory including 384 MB overhead
  134. 16/02/16 09:14:01 INFO Client: Setting up container launch context for our AM
  135. 16/02/16 09:14:01 INFO Client: Setting up the launch environment for our AM container
  136. 16/02/16 09:14:01 INFO Client: Preparing resources for our AM container
  137. 16/02/16 09:14:08 INFO Client: Uploading resource file:/usr/hdp/2.3.4.0-3485/spark/lib/spark-assembly-1.5.2.2.3.4.0-3485-hadoop2.7.1.2.3.4.0-3485.jar -> hdfs://phdns01.cloud.hortonworks.com:8020/user/spark/.sparkStaging/application_1455623665941_0003/spark-assembly-1.5.2.2.3.4.0-3485-hadoop2.7.1.2.3.4.0-3485.jar
  138. 16/02/16 09:14:26 INFO Client: Uploading resource file:/tmp/spark-9320408c-0cd9-4719-9291-887d55d50a4e/__spark_conf__2121412702364171389.zip -> hdfs://phdns01.cloud.hortonworks.com:8020/user/spark/.sparkStaging/application_1455623665941_0003/__spark_conf__2121412702364171389.zip
  139. 16/02/16 09:14:27 INFO SecurityManager: Changing view acls to: spark
  140. 16/02/16 09:14:27 INFO SecurityManager: Changing modify acls to: spark
  141. 16/02/16 09:14:27 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(spark); users with modify permissions: Set(spark)
  142. 16/02/16 09:14:27 INFO Client: Submitting application 3 to ResourceManager
  143. 16/02/16 09:14:27 INFO YarnClientImpl: Submitted application application_1455623665941_0003
  144. 16/02/16 09:14:27 INFO YarnExtensionServices: Starting Yarn extension services with app application_1455623665941_0003 and attemptId None
  145. 16/02/16 09:14:27 INFO YarnHistoryService: Starting YarnHistoryService for application application_1455623665941_0003 attempt None; state=1; endpoint=http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/; bonded to ATS=false; listening=false; batchSize=10; flush count=0; total number queued=0, processed=0; attempted entity posts=0 successful entity posts=0 failed entity posts=0; events dropped=0; app start event received=false; app end event received=false;
  146. 16/02/16 09:14:27 INFO YarnHistoryService: Spark events will be published to the Timeline service at http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  147. 16/02/16 09:14:27 INFO TimelineClientImpl: Timeline service address: http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  148. 16/02/16 09:14:27 INFO YarnHistoryService: History Service listening for events: YarnHistoryService for application application_1455623665941_0003 attempt None; state=1; endpoint=http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/; bonded to ATS=true; listening=true; batchSize=10; flush count=0; total number queued=0, processed=0; attempted entity posts=0 successful entity posts=0 failed entity posts=0; events dropped=0; app start event received=false; app end event received=false;
  149. 16/02/16 09:14:27 INFO YarnExtensionServices: Service org.apache.spark.deploy.yarn.history.YarnHistoryService started
  150. 16/02/16 09:14:28 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  151. 16/02/16 09:14:28 INFO Client:
  152. client token: N/A
  153. diagnostics: N/A
  154. ApplicationMaster host: N/A
  155. ApplicationMaster RPC port: -1
  156. queue: default
  157. start time: 1455642867283
  158. final status: UNDEFINED
  159. tracking URL: http://phdns02.cloud.hortonworks.com:8088/proxy/application_1455623665941_0003/
  160. user: spark
  161. 16/02/16 09:14:29 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  162. 16/02/16 09:14:30 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  163. 16/02/16 09:14:31 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  164. 16/02/16 09:14:32 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  165. 16/02/16 09:14:33 INFO YarnSchedulerBackend$YarnSchedulerEndpoint: ApplicationMaster registered as AkkaRpcEndpointRef(Actor[akka.tcp://sparkYarnAM@172.24.64.101:34739/user/YarnAM#-342514098])
  166. 16/02/16 09:14:33 INFO YarnClientSchedulerBackend: Add WebUI Filter. org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter, Map(PROXY_HOSTS -> phdns02.cloud.hortonworks.com, PROXY_URI_BASES -> http://phdns02.cloud.hortonworks.com:8088/proxy/application_1455623665941_0003), /proxy/application_1455623665941_0003
  167. 16/02/16 09:14:33 INFO JettyUtils: Adding filter: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
  168. 16/02/16 09:14:33 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  169. 16/02/16 09:14:34 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  170. 16/02/16 09:14:35 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  171. 16/02/16 09:14:36 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  172. 16/02/16 09:14:37 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  173. 16/02/16 09:14:38 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  174. 16/02/16 09:14:39 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  175. 16/02/16 09:14:40 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  176. 16/02/16 09:14:41 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  177. 16/02/16 09:14:42 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  178. 16/02/16 09:14:43 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  179. 16/02/16 09:14:44 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  180. 16/02/16 09:14:45 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  181. 16/02/16 09:14:46 INFO Client: Application report for application_1455623665941_0003 (state: ACCEPTED)
  182. 16/02/16 09:14:47 INFO Client: Application report for application_1455623665941_0003 (state: RUNNING)
  183. 16/02/16 09:14:47 INFO Client:
  184. client token: N/A
  185. diagnostics: N/A
  186. ApplicationMaster host: 172.24.64.101
  187. ApplicationMaster RPC port: 0
  188. queue: default
  189. start time: 1455642867283
  190. final status: UNDEFINED
  191. tracking URL: http://phdns02.cloud.hortonworks.com:8088/proxy/application_1455623665941_0003/
  192. user: spark
  193. 16/02/16 09:14:47 INFO YarnClientSchedulerBackend: Application application_1455623665941_0003 has started running.
  194. 16/02/16 09:14:48 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42492.
  195. 16/02/16 09:14:48 INFO NettyBlockTransferService: Server created on 42492
  196. 16/02/16 09:14:48 INFO BlockManagerMaster: Trying to register BlockManager
  197. 16/02/16 09:14:48 INFO BlockManagerMasterEndpoint: Registering block manager 172.24.64.98:42492 with 265.1 MB RAM, BlockManagerId(driver, 172.24.64.98, 42492)
  198. 16/02/16 09:14:48 INFO BlockManagerMaster: Registered BlockManager
  199. 16/02/16 09:14:48 INFO YarnClientSchedulerBackend: SchedulerBackend is ready for scheduling beginning after waiting maxRegisteredResourcesWaitingTime: 30000(ms)
  200. 16/02/16 09:14:48 INFO YarnHistoryService: Application started: SparkListenerApplicationStart(Spark Pi,Some(application_1455623665941_0003),1455642836002,spark,None,None)
  201. 16/02/16 09:14:48 INFO YarnHistoryService: About to POST entity application_1455623665941_0003 with 3 events to timeline service http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  202. 16/02/16 09:14:48 INFO SparkContext: Starting job: reduce at SparkPi.scala:36
  203. 16/02/16 09:14:48 INFO DAGScheduler: Got job 0 (reduce at SparkPi.scala:36) with 10 output partitions
  204. 16/02/16 09:14:48 INFO DAGScheduler: Final stage: ResultStage 0(reduce at SparkPi.scala:36)
  205. 16/02/16 09:14:48 INFO DAGScheduler: Parents of final stage: List()
  206. 16/02/16 09:14:48 INFO DAGScheduler: Missing parents: List()
  207. 16/02/16 09:14:48 INFO DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[1] at map at SparkPi.scala:32), which has no missing parents
  208. 16/02/16 09:14:48 INFO MemoryStore: ensureFreeSpace(1888) called with curMem=0, maxMem=278019440
  209. 16/02/16 09:14:48 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 1888.0 B, free 265.1 MB)
  210. 16/02/16 09:14:48 INFO MemoryStore: ensureFreeSpace(1202) called with curMem=1888, maxMem=278019440
  211. 16/02/16 09:14:48 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 1202.0 B, free 265.1 MB)
  212. 16/02/16 09:14:48 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 172.24.64.98:42492 (size: 1202.0 B, free: 265.1 MB)
  213. 16/02/16 09:14:48 INFO SparkContext: Created broadcast 0 from broadcast at DAGScheduler.scala:861
  214. 16/02/16 09:14:49 INFO DAGScheduler: Submitting 10 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at SparkPi.scala:32)
  215. 16/02/16 09:14:49 INFO YarnScheduler: Adding task set 0.0 with 10 tasks
  216. 16/02/16 09:14:58 INFO YarnClientSchedulerBackend: Registered executor: AkkaRpcEndpointRef(Actor[akka.tcp://sparkExecutor@phdns02.cloud.hortonworks.com:26372/user/Executor#1289874554]) with ID 1
  217. 16/02/16 09:14:59 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  218. 16/02/16 09:15:00 INFO YarnClientSchedulerBackend: Registered executor: AkkaRpcEndpointRef(Actor[akka.tcp://sparkExecutor@phdns02.cloud.hortonworks.com:25573/user/Executor#-1053639674]) with ID 2
  219. 16/02/16 09:15:00 INFO TaskSetManager: Starting task 1.0 in stage 0.0 (TID 1, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  220. 16/02/16 09:15:00 INFO BlockManagerMasterEndpoint: Registering block manager phdns02.cloud.hortonworks.com:35987 with 265.1 MB RAM, BlockManagerId(1, phdns02.cloud.hortonworks.com, 35987)
  221. 16/02/16 09:15:01 INFO BlockManagerMasterEndpoint: Registering block manager phdns02.cloud.hortonworks.com:31335 with 265.1 MB RAM, BlockManagerId(2, phdns02.cloud.hortonworks.com, 31335)
  222. 16/02/16 09:15:03 INFO YarnClientSchedulerBackend: Registered executor: AkkaRpcEndpointRef(Actor[akka.tcp://sparkExecutor@phdns02.cloud.hortonworks.com:18407/user/Executor#-937245840]) with ID 3
  223. 16/02/16 09:15:03 INFO TaskSetManager: Starting task 2.0 in stage 0.0 (TID 2, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  224. 16/02/16 09:15:03 INFO YarnHistoryService: About to POST entity application_1455623665941_0003 with 10 events to timeline service http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  225. 16/02/16 09:15:04 INFO BlockManagerMasterEndpoint: Registering block manager phdns02.cloud.hortonworks.com:22422 with 265.1 MB RAM, BlockManagerId(3, phdns02.cloud.hortonworks.com, 22422)
  226. 16/02/16 09:15:05 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on phdns02.cloud.hortonworks.com:35987 (size: 1202.0 B, free: 265.1 MB)
  227. 16/02/16 09:15:06 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on phdns02.cloud.hortonworks.com:31335 (size: 1202.0 B, free: 265.1 MB)
  228. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 3.0 in stage 0.0 (TID 3, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  229. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 8062 ms on phdns02.cloud.hortonworks.com (1/10)
  230. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 4.0 in stage 0.0 (TID 4, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  231. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 3.0 in stage 0.0 (TID 3) in 40 ms on phdns02.cloud.hortonworks.com (2/10)
  232. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 5.0 in stage 0.0 (TID 5, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  233. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 4.0 in stage 0.0 (TID 4) in 35 ms on phdns02.cloud.hortonworks.com (3/10)
  234. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 6.0 in stage 0.0 (TID 6, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  235. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 5.0 in stage 0.0 (TID 5) in 39 ms on phdns02.cloud.hortonworks.com (4/10)
  236. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 7.0 in stage 0.0 (TID 7, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  237. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 6.0 in stage 0.0 (TID 6) in 41 ms on phdns02.cloud.hortonworks.com (5/10)
  238. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 8.0 in stage 0.0 (TID 8, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  239. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 1.0 in stage 0.0 (TID 1) in 6898 ms on phdns02.cloud.hortonworks.com (6/10)
  240. 16/02/16 09:15:07 INFO TaskSetManager: Starting task 9.0 in stage 0.0 (TID 9, phdns02.cloud.hortonworks.com, PROCESS_LOCAL, 2188 bytes)
  241. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 7.0 in stage 0.0 (TID 7) in 26 ms on phdns02.cloud.hortonworks.com (7/10)
  242. 16/02/16 09:15:07 INFO YarnHistoryService: About to POST entity application_1455623665941_0003 with 10 events to timeline service http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  243. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 8.0 in stage 0.0 (TID 8) in 39 ms on phdns02.cloud.hortonworks.com (8/10)
  244. 16/02/16 09:15:07 INFO TaskSetManager: Finished task 9.0 in stage 0.0 (TID 9) in 42 ms on phdns02.cloud.hortonworks.com (9/10)
  245. 16/02/16 09:15:08 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on phdns02.cloud.hortonworks.com:22422 (size: 1202.0 B, free: 265.1 MB)
  246. 16/02/16 09:15:08 INFO TaskSetManager: Finished task 2.0 in stage 0.0 (TID 2) in 5390 ms on phdns02.cloud.hortonworks.com (10/10)
  247. 16/02/16 09:15:08 INFO YarnScheduler: Removed TaskSet 0.0, whose tasks have all completed, from pool
  248. 16/02/16 09:15:08 INFO DAGScheduler: ResultStage 0 (reduce at SparkPi.scala:36) finished in 19.621 s
  249. 16/02/16 09:15:08 INFO DAGScheduler: Job 0 finished: reduce at SparkPi.scala:36, took 19.881300 s
  250. Pi is roughly 3.140252
  251. 16/02/16 09:15:08 INFO YarnHistoryService: Application end event: SparkListenerApplicationEnd(1455642908646)
  252. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
  253. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
  254. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
  255. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
  256. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
  257. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
  258. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
  259. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
  260. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
  261. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
  262. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
  263. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
  264. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
  265. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
  266. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
  267. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
  268. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
  269. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
  270. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
  271. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
  272. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
  273. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
  274. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
  275. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
  276. 16/02/16 09:15:08 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
  277. 16/02/16 09:15:08 INFO YarnHistoryService: About to POST entity application_1455623665941_0003 with 10 events to timeline service http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  278. 16/02/16 09:15:08 INFO SparkUI: Stopped Spark web UI at http://172.24.64.98:4041
  279. 16/02/16 09:15:08 INFO DAGScheduler: Stopping DAGScheduler
  280. 16/02/16 09:15:08 INFO YarnClientSchedulerBackend: Interrupting monitor thread
  281. 16/02/16 09:15:08 INFO YarnClientSchedulerBackend: Shutting down all executors
  282. 16/02/16 09:15:08 INFO YarnClientSchedulerBackend: Asking each executor to shut down
  283. 16/02/16 09:15:08 INFO YarnExtensionServices: Stopping org.apache.spark.scheduler.cluster.YarnExtensionServices@42e4e589
  284. 16/02/16 09:15:08 INFO YarnHistoryService: About to POST entity application_1455623665941_0003 with 1 events to timeline service http://phdns02.cloud.hortonworks.com:8188/ws/v1/timeline/
  285. 16/02/16 09:15:08 INFO YarnHistoryService: Stopping dequeue service, final queue size is 0
  286. 16/02/16 09:15:08 INFO YarnClientSchedulerBackend: Stopped
  287. 16/02/16 09:15:08 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
  288. 16/02/16 09:15:08 INFO MemoryStore: MemoryStore cleared
  289. 16/02/16 09:15:08 INFO BlockManager: BlockManager stopped
  290. 16/02/16 09:15:08 INFO BlockManagerMaster: BlockManagerMaster stopped
  291. 16/02/16 09:15:08 INFO SparkContext: Successfully stopped SparkContext
  292. 16/02/16 09:15:08 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
  293. 16/02/16 09:15:08 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
  294. 16/02/16 09:15:09 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
  295. 16/02/16 09:15:09 INFO ShutdownHookManager: Shutdown hook called
  296. 16/02/16 09:15:09 INFO ShutdownHookManager: Deleting directory /tmp/spark-9320408c-0cd9-4719-9291-887d55d50a4e
  297. [spark@phdns01 spark]$
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement