An error was encountered:
Traceback (most recent call last):
  File "/usr/hdp/current/spark2-client/python/pyspark/sql/dataframe.py", line 113, in toJSON
    rdd = self._jdf.toJSON()
  File "/usr/hdp/current/spark2-client/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1133, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/hdp/current/spark2-client/python/pyspark/sql/utils.py", line 63, in deco
    return f(*a, **kw)
  File "/usr/hdp/current/spark2-client/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py", line 319, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o96.toJSON.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 4 times, most recent failure: Lost task 0.3 in stage 4.0 (TID 7, wn2-MDMstr.zxmmgisclg5udfemnv0v3qva3e.ax.internal.cloudapp.net, executor 1): java.lang.IllegalStateException: com.microsoft.azure.documentdb.DocumentClientException: Message: {"Errors":["Request rate is large"]}
ActivityId: 859bd9a2-eaef-4c32-acb8-43ce021c28c5, Request URI: /apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, RequestStats:
ResponseTime: 2018-04-06T16:04:01.3903033Z, StoreReadResult: StorePhysicalAddress: rntbd://10.0.0.104:16700/apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, LSN: 465, GlobalCommittedLsn: 465, PartitionKeyRangeId: , IsValid: True, StatusCode: 0, IsGone: False, IsNotFound: False, IsInvalidPartition: False, RequestCharge: 0.38, ItemLSN: -1, ResourceType: Document, OperationType: ReadFeed
, SDK: Microsoft.Azure.Documents.Common/1.21.0.0, StatusCode: TooManyRequests
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:86)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:33)
    at com.microsoft.azure.documentdb.internal.query.ProxyQueryExecutionContext.<init>(ProxyQueryExecutionContext.java:68)
    at com.microsoft.azure.documentdb.internal.query.QueryExecutionContextFactory.createQueryExecutionContext(QueryExecutionContextFactory.java:23)
    at com.microsoft.azure.documentdb.QueryIterable.createQueryExecutionContext(QueryIterable.java:70)
    at com.microsoft.azure.documentdb.QueryIterable.reset(QueryIterable.java:115)
    at com.microsoft.azure.documentdb.QueryIterable.<init>(QueryIterable.java:57)
    at com.microsoft.azure.documentdb.QueryIterable.<init>(QueryIterable.java:31)
    at com.microsoft.azure.documentdb.DocumentClient.readDocuments(DocumentClient.java:1002)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBConnection.readDocuments(CosmosDBConnection.scala:205)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.queryDocuments$1(CosmosDBRDDIterator.scala:192)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.reader$lzycompute(CosmosDBRDDIterator.scala:321)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.reader(CosmosDBRDDIterator.scala:137)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.hasNext(CosmosDBRDDIterator.scala:334)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:395)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.util.random.SamplingUtils$.reservoirSampleAndCount(SamplingUtils.scala:41)
    at org.apache.spark.RangePartitioner$$anonfun$9.apply(Partitioner.scala:263)
    at org.apache.spark.RangePartitioner$$anonfun$9.apply(Partitioner.scala:261)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:108)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: com.microsoft.azure.documentdb.DocumentClientException: Message: {"Errors":["Request rate is large"]}
ActivityId: 859bd9a2-eaef-4c32-acb8-43ce021c28c5, Request URI: /apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, RequestStats:
ResponseTime: 2018-04-06T16:04:01.3903033Z, StoreReadResult: StorePhysicalAddress: rntbd://10.0.0.104:16700/apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, LSN: 465, GlobalCommittedLsn: 465, PartitionKeyRangeId: , IsValid: True, StatusCode: 0, IsGone: False, IsNotFound: False, IsInvalidPartition: False, RequestCharge: 0.38, ItemLSN: -1, ResourceType: Document, OperationType: ReadFeed
, SDK: Microsoft.Azure.Documents.Common/1.21.0.0, StatusCode: TooManyRequests
    at com.microsoft.azure.documentdb.internal.ErrorUtils.maybeThrowException(ErrorUtils.java:69)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.performGetRequest(GatewayProxy.java:245)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.doReadFeed(GatewayProxy.java:111)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.processMessage(GatewayProxy.java:346)
    at com.microsoft.azure.documentdb.DocumentClient$9.apply(DocumentClient.java:3011)
    at com.microsoft.azure.documentdb.internal.RetryUtility.executeDocumentClientRequest(RetryUtility.java:58)
    at com.microsoft.azure.documentdb.DocumentClient.doReadFeed(DocumentClient.java:3021)
    at com.microsoft.azure.documentdb.DocumentQueryClientInternal.doReadFeed(DocumentQueryClientInternal.java:36)
    at com.microsoft.azure.documentdb.internal.query.AbstractQueryExecutionContext.executeRequest(AbstractQueryExecutionContext.java:215)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.executeOnce(DefaultQueryExecutionContext.java:137)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.fillBuffer(DefaultQueryExecutionContext.java:101)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:84)
    ... 34 more
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1732)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1687)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1676)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2050)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2069)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2094)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at org.apache.spark.RangePartitioner$.sketch(Partitioner.scala:266)
    at org.apache.spark.RangePartitioner.<init>(Partitioner.scala:128)
    at org.apache.spark.sql.execution.exchange.ShuffleExchange$.prepareShuffleDependency(ShuffleExchange.scala:221)
    at org.apache.spark.sql.execution.exchange.ShuffleExchange.prepareShuffleDependency(ShuffleExchange.scala:87)
    at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:124)
    at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:115)
    at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
    at org.apache.spark.sql.execution.exchange.ShuffleExchange.doExecute(ShuffleExchange.scala:115)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
    at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:252)
    at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:386)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
    at org.apache.spark.sql.Dataset.toJSON(Dataset.scala:2743)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IllegalStateException: com.microsoft.azure.documentdb.DocumentClientException: Message: {"Errors":["Request rate is large"]}
ActivityId: 859bd9a2-eaef-4c32-acb8-43ce021c28c5, Request URI: /apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, RequestStats:
ResponseTime: 2018-04-06T16:04:01.3903033Z, StoreReadResult: StorePhysicalAddress: rntbd://10.0.0.104:16700/apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, LSN: 465, GlobalCommittedLsn: 465, PartitionKeyRangeId: , IsValid: True, StatusCode: 0, IsGone: False, IsNotFound: False, IsInvalidPartition: False, RequestCharge: 0.38, ItemLSN: -1, ResourceType: Document, OperationType: ReadFeed
, SDK: Microsoft.Azure.Documents.Common/1.21.0.0, StatusCode: TooManyRequests
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:86)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:33)
    at com.microsoft.azure.documentdb.internal.query.ProxyQueryExecutionContext.<init>(ProxyQueryExecutionContext.java:68)
    at com.microsoft.azure.documentdb.internal.query.QueryExecutionContextFactory.createQueryExecutionContext(QueryExecutionContextFactory.java:23)
    at com.microsoft.azure.documentdb.QueryIterable.createQueryExecutionContext(QueryIterable.java:70)
    at com.microsoft.azure.documentdb.QueryIterable.reset(QueryIterable.java:115)
    at com.microsoft.azure.documentdb.QueryIterable.<init>(QueryIterable.java:57)
    at com.microsoft.azure.documentdb.QueryIterable.<init>(QueryIterable.java:31)
    at com.microsoft.azure.documentdb.DocumentClient.readDocuments(DocumentClient.java:1002)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBConnection.readDocuments(CosmosDBConnection.scala:205)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.queryDocuments$1(CosmosDBRDDIterator.scala:192)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.reader$lzycompute(CosmosDBRDDIterator.scala:321)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.reader(CosmosDBRDDIterator.scala:137)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.hasNext(CosmosDBRDDIterator.scala:334)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:395)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.util.random.SamplingUtils$.reservoirSampleAndCount(SamplingUtils.scala:41)
    at org.apache.spark.RangePartitioner$$anonfun$9.apply(Partitioner.scala:263)
    at org.apache.spark.RangePartitioner$$anonfun$9.apply(Partitioner.scala:261)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:108)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
Caused by: com.microsoft.azure.documentdb.DocumentClientException: Message: {"Errors":["Request rate is large"]}
ActivityId: 859bd9a2-eaef-4c32-acb8-43ce021c28c5, Request URI: /apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, RequestStats:
ResponseTime: 2018-04-06T16:04:01.3903033Z, StoreReadResult: StorePhysicalAddress: rntbd://10.0.0.104:16700/apps/e01795ed-8f69-47b3-ac45-e1a66f2ec14f/services/568a3c5f-3e58-4f21-b744-8f784b91fc61/partitions/197c818e-4a33-4fa0-8031-6c665e25a592/replicas/131662668080760616p, LSN: 465, GlobalCommittedLsn: 465, PartitionKeyRangeId: , IsValid: True, StatusCode: 0, IsGone: False, IsNotFound: False, IsInvalidPartition: False, RequestCharge: 0.38, ItemLSN: -1, ResourceType: Document, OperationType: ReadFeed
, SDK: Microsoft.Azure.Documents.Common/1.21.0.0, StatusCode: TooManyRequests
    at com.microsoft.azure.documentdb.internal.ErrorUtils.maybeThrowException(ErrorUtils.java:69)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.performGetRequest(GatewayProxy.java:245)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.doReadFeed(GatewayProxy.java:111)
    at com.microsoft.azure.documentdb.internal.GatewayProxy.processMessage(GatewayProxy.java:346)
    at com.microsoft.azure.documentdb.DocumentClient$9.apply(DocumentClient.java:3011)
    at com.microsoft.azure.documentdb.internal.RetryUtility.executeDocumentClientRequest(RetryUtility.java:58)
    at com.microsoft.azure.documentdb.DocumentClient.doReadFeed(DocumentClient.java:3021)
    at com.microsoft.azure.documentdb.DocumentQueryClientInternal.doReadFeed(DocumentQueryClientInternal.java:36)
    at com.microsoft.azure.documentdb.internal.query.AbstractQueryExecutionContext.executeRequest(AbstractQueryExecutionContext.java:215)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.executeOnce(DefaultQueryExecutionContext.java:137)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.fillBuffer(DefaultQueryExecutionContext.java:101)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.next(DefaultQueryExecutionContext.java:84)
    ... 34 more
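
Every failure in the chain above bottoms out in the same Cosmos DB (DocumentDB) error: StatusCode: TooManyRequests with the message "Request rate is large", i.e. HTTP 429. The ReadFeed requests issued by the azure-cosmosdb-spark connector consumed request units faster than the collection's provisioned throughput allowed, the SDK's RetryUtility exhausted its retries, and Spark aborted the stage after four attempts of task 0 during df.toJSON().

Below is a minimal sketch of one way to re-run the failing read with throttling-friendlier settings. The Endpoint/Masterkey/Database/Collection values are placeholders, and the query_* keys are the throttling-related options from the azure-cosmosdb-spark config map as documented for connectors of this era; verify them against the connector version actually on the classpath.

# Hedged sketch, not the job from the paste: cut the per-request RU cost and
# let the DocumentDB SDK back off on 429s instead of failing the Spark task.
from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("cosmosdb-read").getOrCreate()

read_config = {
    "Endpoint": "https://<account>.documents.azure.com:443/",  # placeholder
    "Masterkey": "<master-key>",                               # placeholder
    "Database": "<database>",                                  # placeholder
    "Collection": "<collection>",                              # placeholder
    # Assumed connector options; check your azure-cosmosdb-spark version:
    "query_pagesize": "100",                            # smaller ReadFeed pages, fewer RUs per call
    "query_maxretryattemptsonthrottledrequests": "10",  # SDK-level retries on HTTP 429
    "query_maxretrywaittimeinseconds": "120",           # total back-off budget per request
}

df = (spark.read
      .format("com.microsoft.azure.cosmosdb.spark")
      .options(**read_config)
      .load())

# toJSON() is the call that triggered the failed stage in the traceback above.
json_rdd = df.toJSON()
print(json_rdd.take(1))

If the job still throttles, the other standard fix is to raise the collection's provisioned RU/s, or to reduce the number of concurrent Spark tasks reading the collection, for the duration of the job.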