spark.yarn.driver.memoryOverhead is set but does not apply in client mode.
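(A note on this warning: in yarn-client mode the driver runs in the local submitting process, so spark.yarn.driver.memoryOverhead is honored only in cluster mode; the client-mode counterpart is spark.yarn.am.memoryOverhead. A minimal sketch of setting the client-mode equivalent programmatically, where the overhead value is an illustrative placeholder:

from pyspark import SparkConf, SparkContext

# Hypothetical value in MiB; size it to the application's off-heap needs.
conf = (SparkConf()
        .setMaster("yarn-client")
        .set("spark.yarn.am.memoryOverhead", "512"))  # applies in client mode
sc = SparkContext(conf=conf)

This warning is independent of the failure below; it only means the driver-overhead setting had no effect.)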
Traceback (most recent call last):
  File "/usr/hdp/2.4.0.0-169/spark/python/test3.py", line 51, in <module>
    valueConverter=valueConv)
  File "/usr/hdp/2.4.0.0-169/spark/python/pyspark/rdd.py", line 1346, in saveAsNewAPIHadoopDataset
    keyConverter, valueConverter, True)
  File "/usr/hdp/2.4.0.0-169/spark/python/lib/py4j-0.9-src.zip/py4j/java_gateway.py", line 813, in __call__
  File "/usr/hdp/2.4.0.0-169/spark/python/lib/py4j-0.9-src.zip/py4j/protocol.py", line 308, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 2.0 failed 4 times, most recent failure: Lost task 1.3 in stage 2.0 (TID 6, host2): java.lang.RuntimeException: java.lang.NullPointerException
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:208)
    at org.apache.hadoop.hbase.client.ClientSmallReversedScanner.loadCache(ClientSmallReversedScanner.java:211)
    at org.apache.hadoop.hbase.client.ClientSmallReversedScanner.next(ClientSmallReversedScanner.java:185)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1256)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1162)
    at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:370)
    at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:321)
    at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:206)
    at org.apache.hadoop.hbase.client.BufferedMutatorImpl.close(BufferedMutatorImpl.java:158)
    at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.close(TableOutputFormat.java:120)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$5.apply$mcV$sp(PairRDDFunctions.scala:1120)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1259)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1119)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1091)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:89)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NullPointerException
    at org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.getMetaReplicaNodes(ZooKeeperWatcher.java:399)
    at org.apache.hadoop.hbase.zookeeper.MetaTableLocator.blockUntilAvailable(MetaTableLocator.java:552)
    at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getMetaRegionLocation(ZooKeeperRegistry.java:61)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateMeta(ConnectionManager.java:1192)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1159)
    at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:300)
    at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:151)
    at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:59)
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
    ... 19 more

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1431)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1419)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1418)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1418)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:799)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:799)
    at scala.Option.foreach(Option.scala:236)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:799)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1640)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1599)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1588)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:620)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1832)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1922)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1146)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1.apply(PairRDDFunctions.scala:1074)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1.apply(PairRDDFunctions.scala:1074)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsNewAPIHadoopDataset(PairRDDFunctions.scala:1074)
    at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:804)
    at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:381)
    at py4j.Gateway.invoke(Gateway.java:259)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:209)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.RuntimeException: java.lang.NullPointerException
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:208)
    at org.apache.hadoop.hbase.client.ClientSmallReversedScanner.loadCache(ClientSmallReversedScanner.java:211)
    at org.apache.hadoop.hbase.client.ClientSmallReversedScanner.next(ClientSmallReversedScanner.java:185)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1256)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1162)
    at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:370)
    at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:321)
    at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:206)
    at org.apache.hadoop.hbase.client.BufferedMutatorImpl.close(BufferedMutatorImpl.java:158)
    at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.close(TableOutputFormat.java:120)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$5.apply$mcV$sp(PairRDDFunctions.scala:1120)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1259)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1119)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1091)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:89)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    ... 1 more
Caused by: java.lang.NullPointerException
    at org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.getMetaReplicaNodes(ZooKeeperWatcher.java:399)
    at org.apache.hadoop.hbase.zookeeper.MetaTableLocator.blockUntilAvailable(MetaTableLocator.java:552)
    at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getMetaRegionLocation(ZooKeeperRegistry.java:61)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateMeta(ConnectionManager.java:1192)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1159)
    at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:300)
    at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:151)
    at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:59)
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
    ... 19 more
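
For context on the root cause: the innermost NullPointerException is thrown in ZooKeeperWatcher.getMetaReplicaNodes while the HBase client on the executor tries to locate hbase:meta through ZooKeeper. A common cause (an inference from the trace, not something the log itself states) is that the executors never received a usable HBase ZooKeeper configuration, e.g. a wrong zookeeper.znode.parent (HDP defaults to /hbase-unsecure rather than /hbase) or a missing hbase.zookeeper.quorum. Below is a minimal sketch of the kind of PySpark write that produces this call path, with those settings passed explicitly in the job configuration; the table name, quorum hosts, and sample row are hypothetical placeholders, and the converter classes come from the Spark examples jar, which must be on the driver/executor classpath:

from pyspark import SparkContext

sc = SparkContext(appName="hbase_write_sketch")

# All values below are illustrative placeholders; adjust to the cluster.
conf = {
    "hbase.zookeeper.quorum": "zk1.example.com,zk2.example.com,zk3.example.com",
    "hbase.zookeeper.property.clientPort": "2181",
    "zookeeper.znode.parent": "/hbase-unsecure",  # HDP default; plain HBase uses /hbase
    "hbase.mapred.outputtable": "test_table",
    "mapreduce.outputformat.class":
        "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
    "mapreduce.job.output.key.class":
        "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
    "mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable",
}

# Converters shipped with the Spark examples jar.
keyConv = ("org.apache.spark.examples.pythonconverters."
           "StringToImmutableBytesWritableConverter")
valueConv = ("org.apache.spark.examples.pythonconverters."
             "StringListToPutConverter")

# Each element is (row_key, [row_key, column_family, qualifier, value]).
rows = sc.parallelize([("row1", ["row1", "cf", "col1", "value1"])])
rows.saveAsNewAPIHadoopDataset(conf=conf,
                               keyConverter=keyConv,
                               valueConverter=valueConv)

If the same write works from a node where the HBase client is configured but fails inside executors, distributing hbase-site.xml to the job (e.g. via spark-submit --files) so every executor resolves the same quorum and znode parent is the usual first thing to check.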