Spark Upsert Error

a guest · Apr 5th, 2018

Traceback (most recent call last):
  File "/usr/hdp/current/spark2-client/python/pyspark/sql/readwriter.py", line 593, in save
    self._jwrite.save()
  File "/usr/hdp/current/spark2-client/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1133, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/hdp/current/spark2-client/python/pyspark/sql/utils.py", line 63, in deco
    return f(*a, **kw)
  File "/usr/hdp/current/spark2-client/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py", line 319, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o127.save.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 4.0 failed 4 times, most recent failure: Lost task 2.3 in stage 4.0 (TID 90, wn2-MDMstr.zxmmgisclg5udfemnv0v3qva3e.ax.internal.cloudapp.net, executor 2): java.lang.NoClassDefFoundError: com/microsoft/azure/documentdb/bulkexecutor/DocumentBulkExecutor
    at com.microsoft.azure.cosmosdb.spark.CosmosDBConnection.getDocumentBulkImporter(CosmosDBConnection.scala:93)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.bulkImport(CosmosDBSpark.scala:238)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.savePartition(CosmosDBSpark.scala:439)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.com$microsoft$azure$cosmosdb$spark$CosmosDBSpark$$savePartition(CosmosDBSpark.scala:395)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$$anonfun$1.apply(CosmosDBSpark.scala:186)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$$anonfun$1.apply(CosmosDBSpark.scala:177)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:108)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1732)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1687)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1676)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2050)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2069)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2094)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.save(CosmosDBSpark.scala:187)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.save(CosmosDBSpark.scala:472)
    at com.microsoft.azure.cosmosdb.spark.DefaultSource.createRelation(DefaultSource.scala:69)
    at org.apache.spark.sql.execution.datasources.DataSource.write(DataSource.scala:471)
    at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:50)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
    at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:609)
    at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:233)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NoClassDefFoundError: com/microsoft/azure/documentdb/bulkexecutor/DocumentBulkExecutor
    at com.microsoft.azure.cosmosdb.spark.CosmosDBConnection.getDocumentBulkImporter(CosmosDBConnection.scala:93)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.bulkImport(CosmosDBSpark.scala:238)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.savePartition(CosmosDBSpark.scala:439)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.com$microsoft$azure$cosmosdb$spark$CosmosDBSpark$$savePartition(CosmosDBSpark.scala:395)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$$anonfun$1.apply(CosmosDBSpark.scala:186)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$$anonfun$1.apply(CosmosDBSpark.scala:177)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:108)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
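
The root cause is the NoClassDefFoundError at the bottom of the trace: the azure-cosmosdb-spark connector's bulk-import path (CosmosDBConnection.getDocumentBulkImporter) references com.microsoft.azure.documentdb.bulkexecutor.DocumentBulkExecutor, and that class was not on the executor classpath on the worker node, so each retry of task 2 in stage 4.0 failed the same way. The following is a minimal, hedged PySpark sketch of the kind of upsert that exercises this path; it is not the original author's code, and every endpoint, key, name, and path in it is a placeholder:

    # Hypothetical repro sketch, assuming the azure-cosmosdb-spark connector
    # with bulk import enabled. All connection values are placeholders.
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.appName("cosmosdb-upsert").getOrCreate()

    df = spark.read.json("/tmp/input.json")  # placeholder input data

    write_config = {
        "Endpoint": "https://<account>.documents.azure.com:443/",  # placeholder
        "Masterkey": "<master-key>",                               # placeholder
        "Database": "<database>",                                  # placeholder
        "Collection": "<collection>",                              # placeholder
        "Upsert": "true",
        "BulkImport": "true",  # the bulk path seen in CosmosDBSpark$.bulkImport
    }

    # DataFrameWriter.save() is the o127.save call in the traceback; with bulk
    # import enabled, each executor tries to load
    # com.microsoft.azure.documentdb.bulkexecutor.DocumentBulkExecutor and
    # throws NoClassDefFoundError if the bulk executor jar is not present.
    (df.write
       .format("com.microsoft.azure.cosmosdb.spark")
       .mode("append")
       .options(**write_config)
       .save())

Because the failure happens inside executor tasks, adding the jar only to the driver is not enough; the bulk executor dependency has to reach every worker. One common route is spark-submit --packages, shown below with assumed Maven coordinates and versions (documentdb-bulkexecutor is the artifact that provides DocumentBulkExecutor; match the connector build to your Spark and Scala versions, or use the connector's uber-jar, which bundles it):

    spark-submit --master yarn \
        --packages com.microsoft.azure:azure-cosmosdb-spark_2.2.0_2.11:1.1.1,com.microsoft.azure:documentdb-bulkexecutor:1.0.6 \
        upsert_job.py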