Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- >>>begin main...
- >>>begin run...
- >>>begin generateSparkContext...
- 18/01/21 20:40:39 ERROR TransportClient: Failed to send RPC 8159149554508405784 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- 18/01/21 20:40:39 ERROR YarnSchedulerBackend$YarnSchedulerEndpoint: Sending RequestExecutors(40,0,Map()) to AM was unsuccessful
- java.io.IOException: Failed to send RPC 8159149554508405784 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:249)
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:233)
- at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:514)
- at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:488)
- at io.netty.util.concurrent.DefaultPromise.access$000(DefaultPromise.java:34)
- at io.netty.util.concurrent.DefaultPromise$1.run(DefaultPromise.java:438)
- at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:408)
- at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:455)
- at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
- at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- 18/01/21 20:40:39 ERROR SparkContext: Error initializing SparkContext.
- org.apache.spark.SparkException: Exception thrown in awaitResult
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:77)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:75)
- at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at scala.PartialFunction$OrElse.apply(PartialFunction.scala:167)
- at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:83)
- at org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend.requestTotalExecutors(CoarseGrainedSchedulerBackend.scala:512)
- at org.apache.spark.ExecutorAllocationManager.start(ExecutorAllocationManager.scala:236)
- at org.apache.spark.SparkContext$$anonfun$21.apply(SparkContext.scala:552)
- at org.apache.spark.SparkContext$$anonfun$21.apply(SparkContext.scala:552)
- at scala.Option.foreach(Option.scala:257)
- at org.apache.spark.SparkContext.<init>(SparkContext.scala:552)
- at aiTest.utils.JavaUtil.generateSparkContext(JavaUtil.java:42)
- at aiTest.model.MovieRecommendationTrain.run(MovieRecommendationTrain.java:187)
- at aiTest.model.MovieRecommendationTrain.main(MovieRecommendationTrain.java:212)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:497)
- at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
- at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
- at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
- at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
- at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
- Caused by: java.io.IOException: Failed to send RPC 8159149554508405784 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:249)
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:233)
- at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:514)
- at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:488)
- at io.netty.util.concurrent.DefaultPromise.access$000(DefaultPromise.java:34)
- at io.netty.util.concurrent.DefaultPromise$1.run(DefaultPromise.java:438)
- at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:408)
- at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:455)
- at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
- at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- 18/01/21 20:40:39 ERROR TransportClient: Failed to send RPC 4662371465967043083 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- 18/01/21 20:40:39 ERROR YarnSchedulerBackend$YarnSchedulerEndpoint: Sending RequestExecutors(0,0,Map()) to AM was unsuccessful
- java.io.IOException: Failed to send RPC 4662371465967043083 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:249)
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:233)
- at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:514)
- at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:488)
- at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:427)
- at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:129)
- at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetFailure(AbstractChannel.java:852)
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:738)
- at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1251)
- at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:743)
- at io.netty.channel.AbstractChannelHandlerContext.invokeWrite(AbstractChannelHandlerContext.java:735)
- at io.netty.channel.AbstractChannelHandlerContext.access$1900(AbstractChannelHandlerContext.java:36)
- at io.netty.channel.AbstractChannelHandlerContext$AbstractWriteTask.write(AbstractChannelHandlerContext.java:1072)
- at io.netty.channel.AbstractChannelHandlerContext$WriteAndFlushTask.write(AbstractChannelHandlerContext.java:1126)
- at io.netty.channel.AbstractChannelHandlerContext$AbstractWriteTask.run(AbstractChannelHandlerContext.java:1061)
- at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:408)
- at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:455)
- at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
- at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- 18/01/21 20:40:39 ERROR Utils: Uncaught exception in thread main
- org.apache.spark.SparkException: Exception thrown in awaitResult
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:77)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:75)
- at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at scala.PartialFunction$OrElse.apply(PartialFunction.scala:167)
- at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:83)
- at org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend.requestTotalExecutors(CoarseGrainedSchedulerBackend.scala:512)
- at org.apache.spark.scheduler.cluster.YarnSchedulerBackend.stop(YarnSchedulerBackend.scala:93)
- at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.stop(YarnClientSchedulerBackend.scala:151)
- at org.apache.spark.scheduler.TaskSchedulerImpl.stop(TaskSchedulerImpl.scala:467)
- at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1588)
- at org.apache.spark.SparkContext$$anonfun$stop$8.apply$mcV$sp(SparkContext.scala:1826)
- at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1283)
- at org.apache.spark.SparkContext.stop(SparkContext.scala:1825)
- at org.apache.spark.SparkContext.<init>(SparkContext.scala:587)
- at aiTest.utils.JavaUtil.generateSparkContext(JavaUtil.java:42)
- at aiTest.model.MovieRecommendationTrain.run(MovieRecommendationTrain.java:187)
- at aiTest.model.MovieRecommendationTrain.main(MovieRecommendationTrain.java:212)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:497)
- at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
- at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
- at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
- at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
- at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
- Caused by: java.io.IOException: Failed to send RPC 4662371465967043083 to /10.255.129.205:32571: java.nio.channels.ClosedChannelException
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:249)
- at org.apache.spark.network.client.TransportClient$3.operationComplete(TransportClient.java:233)
- at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:514)
- at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:488)
- at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:427)
- at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:129)
- at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetFailure(AbstractChannel.java:852)
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:738)
- at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1251)
- at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:743)
- at io.netty.channel.AbstractChannelHandlerContext.invokeWrite(AbstractChannelHandlerContext.java:735)
- at io.netty.channel.AbstractChannelHandlerContext.access$1900(AbstractChannelHandlerContext.java:36)
- at io.netty.channel.AbstractChannelHandlerContext$AbstractWriteTask.write(AbstractChannelHandlerContext.java:1072)
- at io.netty.channel.AbstractChannelHandlerContext$WriteAndFlushTask.write(AbstractChannelHandlerContext.java:1126)
- at io.netty.channel.AbstractChannelHandlerContext$AbstractWriteTask.run(AbstractChannelHandlerContext.java:1061)
- at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:408)
- at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:455)
- at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
- at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: java.nio.channels.ClosedChannelException
- at io.netty.channel.AbstractChannel$AbstractUnsafe.write(...)(Unknown Source)
- Exception in thread "main" org.apache.spark.SparkException: Exception thrown in awaitResult
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:77)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:75)
- at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
- at scala.PartialFunction$OrElse.apply(PartialFunction.scala:167)
- at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:83)
- at org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend.requestTotalExecutors(CoarseGrainedSchedulerBackend.scala:512)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement