Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- The Spark job fails with the error log below. The root cause (see the innermost `Caused by:` near the end of the trace) is `java.lang.IllegalArgumentException: Invalid KVStoreConfig. Request timeout: 45,000 ms exceeds socket read timeout: 30,000 ms`, raised by `oracle.kv.KVStoreFactory.getStoreInternal` when the Oracle NoSQL connection is created — the KVStore request timeout must be set less than or equal to the socket read timeout.
- [2018-04-19T06:59:58.528Z] oracle.kv.spark.rdd.KVStoreRDD DEBUG: Generated SQL statement: SELECT stringValue,entityId,numericValue,id,booleanValue,paramsHash FROM fm_metrics_0 WHERE (eventTime = 0) AND (paramsHash = 'a2770969-c827-30f2-910f-6179418462df') AND ((id = 'e2668020-3b1f-3b61-8e9b-23d43fa65234') OR (id = '171bd431-7566-3414-ad41-315c46507fbf')) for columns [stringValue,entityId,numericValue,id,booleanValue,paramsHash] and filters [EqualTo(eventTime,0),EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df),Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- [2018-04-19T06:59:58.905Z] oracle.kv.spark.rdd.KVStoreRDD DEBUG: Generated SQL statement: SELECT stringValue,entityId,numericValue,id,booleanValue,paramsHash FROM fm_metrics_0 WHERE (eventTime = 0) AND (paramsHash = 'a2770969-c827-30f2-910f-6179418462df') AND ((id = 'e2668020-3b1f-3b61-8e9b-23d43fa65234') OR (id = '171bd431-7566-3414-ad41-315c46507fbf')) for columns [stringValue,entityId,numericValue,id,booleanValue,paramsHash] and filters [EqualTo(eventTime,0),EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df),Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenAggregate(key=[], functions=[(count(1),mode=Final,isDistinct=false)], output=[_c0#278,result#275L])
- +- TungstenExchange SinglePartition, None
- +- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#286L])
- +- Project
- +- SortMergeJoin [entityId#95], [ID#210]
- :- Sort [entityId#95 ASC], false, 0
- : +- Project [entityId#95]
- : +- Filter ((entityMetrics#274[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#274[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Final,isDistinct=false)], output=[entityId#95,entityMetrics#274])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#95,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Partial,isDistinct=false)], output=[entityId#95,map#277])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- Project [stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@5c1bf28a[stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#210 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#210,200), None
- +- Project [ID#210]
- +- Filter (STATUS#222 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@e2f9484,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#210,STATUS#222] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:80)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.ConvertToSafe.doExecute(rowFormatConverters.scala:56)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:187)
- at org.apache.spark.sql.execution.Limit.executeCollect(basicOperators.scala:165)
- at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:174)
- at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
- at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
- at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
- at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:2086)
- at org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$execute$1(DataFrame.scala:1498)
- at org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$collect(DataFrame.scala:1505)
- at org.apache.spark.sql.DataFrame$$anonfun$head$1.apply(DataFrame.scala:1375)
- at org.apache.spark.sql.DataFrame$$anonfun$head$1.apply(DataFrame.scala:1374)
- at org.apache.spark.sql.DataFrame.withCallback(DataFrame.scala:2099)
- at org.apache.spark.sql.DataFrame.head(DataFrame.scala:1374)
- at org.apache.spark.sql.DataFrame.take(DataFrame.scala:1456)
- at org.apache.spark.sql.DataFrame.showString(DataFrame.scala:170)
- at org.apache.spark.sql.DataFrame.show(DataFrame.scala:350)
- at org.apache.spark.sql.DataFrame.show(DataFrame.scala:311)
- at org.apache.spark.sql.DataFrame.show(DataFrame.scala:319)
- at com.oracle.iot.apps.common.spark.utils.DynamicProcessorSparkUtils.doCompute(DynamicProcessorSparkUtils.java:90)
- at com.oracle.iot.fm.processor.CAS_DynamicProcessor.compute(CAS_DynamicProcessor.java:184)
- at com.oracle.iot.fm.processor.CAS_DynamicProcessor.execute(CAS_DynamicProcessor.java:142)
- at com.oracle.iot.fm.processor.CAS_DynamicProcessor.execute(CAS_DynamicProcessor.java:46)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl$BatchTxFlowHandler.lambda$execute$0(TxFlowManagerImpl.java:517)
- at com.oracle.bacs.bootstrap.scope.AnalyticsProcessorScope.doWithAnalyticsProcessor(AnalyticsProcessorScope.java:89)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl$BatchTxFlowHandler.execute(TxFlowManagerImpl.java:515)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl.execute(TxFlowManagerImpl.java:239)
- at com.oracle.bacs.bootstrap.application.rest.BatchTxFlowExecutor.lambda$submitInternal$0(BatchTxFlowExecutor.java:284)
- at java.util.concurrent.FutureTask.run(FutureTask.java:266)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenExchange SinglePartition, None
- +- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#286L])
- +- Project
- +- SortMergeJoin [entityId#95], [ID#210]
- :- Sort [entityId#95 ASC], false, 0
- : +- Project [entityId#95]
- : +- Filter ((entityMetrics#274[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#274[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Final,isDistinct=false)], output=[entityId#95,entityMetrics#274])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#95,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Partial,isDistinct=false)], output=[entityId#95,map#277])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- Project [stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@5c1bf28a[stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#210 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#210,200), None
- +- Project [ID#210]
- +- Filter (STATUS#222 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@e2f9484,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#210,STATUS#222] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:86)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:80)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 41 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#286L])
- +- Project
- +- SortMergeJoin [entityId#95], [ID#210]
- :- Sort [entityId#95 ASC], false, 0
- : +- Project [entityId#95]
- : +- Filter ((entityMetrics#274[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#274[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Final,isDistinct=false)], output=[entityId#95,entityMetrics#274])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#95,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Partial,isDistinct=false)], output=[entityId#95,map#277])
- : +- ConvertToSafe
- : +- Sort [entityId#95 ASC], false, 0
- : +- Project [stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@5c1bf28a[stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#210 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#210,200), None
- +- Project [ID#210]
- +- Filter (STATUS#222 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@e2f9484,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#210,STATUS#222] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:80)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:164)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 49 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Final,isDistinct=false)], output=[entityId#95,entityMetrics#274])
- +- ConvertToSafe
- +- Sort [entityId#95 ASC], false, 0
- +- TungstenExchange hashpartitioning(entityId#95,200), None
- +- ConvertToUnsafe
- +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Partial,isDistinct=false)], output=[entityId#95,map#277])
- +- ConvertToSafe
- +- Sort [entityId#95 ASC], false, 0
- +- Project [stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93]
- +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- +- Scan oracle.kv.spark.KVStoreAvroRelation@5c1bf28a[stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate.doExecute(SortBasedAggregate.scala:69)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Filter.doExecute(basicOperators.scala:70)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Project.doExecute(basicOperators.scala:46)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Sort.doExecute(Sort.scala:64)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.joins.SortMergeJoin.doExecute(SortMergeJoin.scala:70)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Project.doExecute(basicOperators.scala:46)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:86)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:80)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 58 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenExchange hashpartitioning(entityId#95,200), None
- +- ConvertToUnsafe
- +- SortBasedAggregate(key=[entityId#95], functions=[(MapMetricResults(id#92,paramsHash#93,numericValue#102,booleanValue#103,stringValue#104),mode=Partial,isDistinct=false)], output=[entityId#95,map#277])
- +- ConvertToSafe
- +- Sort [entityId#95 ASC], false, 0
- +- Project [stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93]
- +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- +- Scan oracle.kv.spark.KVStoreAvroRelation@5c1bf28a[stringValue#104,entityId#95,numericValue#102,id#92,booleanValue#103,paramsHash#93] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Sort.doExecute(Sort.scala:64)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.ConvertToSafe.doExecute(rowFormatConverters.scala:56)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate$$anonfun$doExecute$1.apply(SortBasedAggregate.scala:72)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate$$anonfun$doExecute$1.apply(SortBasedAggregate.scala:69)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 91 more
- Caused by: java.lang.IllegalArgumentException: Invalid KVStoreConfig. Request timeout: 45,000 ms exceeds socket read timeout: 30,000 ms
- at oracle.kv.KVStoreFactory.getStoreInternal(KVStoreFactory.java:159)
- at oracle.kv.KVStoreFactory.getStore(KVStoreFactory.java:122)
- at oracle.kv.KVStoreFactory.getStore(KVStoreFactory.java:59)
- at oracle.kv.spark.utils.ConnectionDef$$anonfun$createConnection$4.apply(ConnectionDef.scala:40)
- at oracle.kv.spark.utils.ConnectionDef$$anonfun$createConnection$4.apply(ConnectionDef.scala:40)
- at scala.util.Try$.apply(Try.scala:161)
- at oracle.kv.spark.utils.ConnectionDef.createConnection(ConnectionDef.scala:40)
- at oracle.kv.spark.rdd.KVStoreRDD.computePartitions(KVStoreRDD.scala:155)
- at oracle.kv.spark.rdd.KVStoreRDD.getPartitions(KVStoreRDD.scala:152)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.bacs.metrics.MetricsRDD.getPartitions(MetricsRDD.scala:33)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.ShuffleDependency.<init>(Dependency.scala:91)
- at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:220)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 109 more
- [2018-04-19T07:00:00.103Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx969325827_rvxf.user" from Hadoop...
- [2018-04-19T07:00:00.104Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx969325827_rvxf.password" from Hadoop...
- [2018-04-19T07:00:00.155Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx1950214322_nsx2.user" from Hadoop...
- [2018-04-19T07:00:00.156Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx1950214322_nsx2.password" from Hadoop...
- [2018-04-19T07:00:00.208Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx1646181608_4f3n.user" from Hadoop...
- [2018-04-19T07:00:00.208Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx1646181608_4f3n.password" from Hadoop...
- [2018-04-19T07:00:00.273Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx958504679_sxio.user" from Hadoop...
- [2018-04-19T07:00:00.273Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx958504679_sxio.password" from Hadoop...
- [2018-04-19T07:00:00.342Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.xdbaasx1116648067_l74a.user" from Hadoop...
- [2018-04-19T07:00:00.342Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.xdbaasx1116648067_l74a.password" from Hadoop...
- [2018-04-19T07:00:00.383Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx2077748762_ak2z.user" from Hadoop...
- [2018-04-19T07:00:00.383Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.x_dbaasx2077748762_ak2z.password" from Hadoop...
- [2018-04-19T07:00:00.416Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.user" from Hadoop...
- [2018-04-19T07:00:00.417Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.password" from Hadoop...
- [2018-04-19T07:00:00.483Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.user" from Hadoop...
- [2018-04-19T07:00:00.483Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.password" from Hadoop...
- [2018-04-19T07:00:00.546Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.user" from Hadoop...
- [2018-04-19T07:00:00.546Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.password" from Hadoop...
- [2018-04-19T07:00:00.594Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.user" from Hadoop...
- [2018-04-19T07:00:00.594Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.cred.HadoopCredentialService, appId: 0-AD, du: 1, message: Retrieving credential "oracle.iot.tenant.0-ad_0-ad.commondatabaselink_5s5x.password" from Hadoop...
- [2018-04-19T07:00:00.861Z] oracle.kv.spark.rdd.KVStoreRDD DEBUG: Generated SQL statement: SELECT entityId,stringValue,numericValue,booleanValue,id,paramsHash FROM fm_metrics_0 WHERE (eventTime = 0) AND (paramsHash = 'a2770969-c827-30f2-910f-6179418462df') AND ((id = 'e2668020-3b1f-3b61-8e9b-23d43fa65234') OR (id = '171bd431-7566-3414-ad41-315c46507fbf')) for columns [entityId,stringValue,numericValue,booleanValue,id,paramsHash] and filters [EqualTo(eventTime,0),EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df),Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenAggregate(key=[], functions=[(count(1),mode=Final,isDistinct=false)], output=[_c0#551,result#548L])
- +- TungstenExchange SinglePartition, None
- +- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#555L])
- +- Project
- +- SortMergeJoin [entityId#368], [ID#483]
- :- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368]
- : +- Filter ((entityMetrics#547[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#547[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Final,isDistinct=false)], output=[entityId#368,entityMetrics#547])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#368,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Partial,isDistinct=false)], output=[entityId#368,map#550])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@453842ac[entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#483 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#483,200), None
- +- Project [ID#483]
- +- Filter (STATUS#495 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@3f2ca11a,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#483,STATUS#495] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:80)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
- at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
- at org.apache.spark.sql.DataFrame.rdd$lzycompute(DataFrame.scala:1637)
- at org.apache.spark.sql.DataFrame.rdd(DataFrame.scala:1634)
- at org.apache.spark.sql.DataFrame.toJavaRDD(DataFrame.scala:1648)
- at com.oracle.iot.apps.common.spark.utils.DynamicProcessorSparkUtils.saveDQResultsToBOVS(DynamicProcessorSparkUtils.java:134)
- at com.oracle.iot.fm.processor.CAS_DynamicProcessor.execute(CAS_DynamicProcessor.java:143)
- at com.oracle.iot.fm.processor.CAS_DynamicProcessor.execute(CAS_DynamicProcessor.java:46)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl$BatchTxFlowHandler.lambda$execute$0(TxFlowManagerImpl.java:517)
- at com.oracle.bacs.bootstrap.scope.AnalyticsProcessorScope.doWithAnalyticsProcessor(AnalyticsProcessorScope.java:89)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl$BatchTxFlowHandler.execute(TxFlowManagerImpl.java:515)
- at com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl.execute(TxFlowManagerImpl.java:239)
- at com.oracle.bacs.bootstrap.application.rest.BatchTxFlowExecutor.lambda$submitInternal$0(BatchTxFlowExecutor.java:284)
- at java.util.concurrent.FutureTask.run(FutureTask.java:266)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenExchange SinglePartition, None
- +- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#555L])
- +- Project
- +- SortMergeJoin [entityId#368], [ID#483]
- :- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368]
- : +- Filter ((entityMetrics#547[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#547[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Final,isDistinct=false)], output=[entityId#368,entityMetrics#547])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#368,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Partial,isDistinct=false)], output=[entityId#368,map#550])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@453842ac[entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#483 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#483,200), None
- +- Project [ID#483]
- +- Filter (STATUS#495 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@3f2ca11a,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#483,STATUS#495] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:86)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:80)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 22 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#555L])
- +- Project
- +- SortMergeJoin [entityId#368], [ID#483]
- :- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368]
- : +- Filter ((entityMetrics#547[7c2aabcc-474c-36d5-a684-09db26387f3d].numericValue < 8.502857022092718) && (entityMetrics#547[77f273b2-cd5b-3e00-98fe-f8c97cfcd85e].booleanValue = true))
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Final,isDistinct=false)], output=[entityId#368,entityMetrics#547])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- TungstenExchange hashpartitioning(entityId#368,200), None
- : +- ConvertToUnsafe
- : +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Partial,isDistinct=false)], output=[entityId#368,map#550])
- : +- ConvertToSafe
- : +- Sort [entityId#368 ASC], false, 0
- : +- Project [entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366]
- : +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- : +- Scan oracle.kv.spark.KVStoreAvroRelation@453842ac[entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- +- Sort [ID#483 ASC], false, 0
- +- TungstenExchange hashpartitioning(ID#483,200), None
- +- Project [ID#483]
- +- Filter (STATUS#495 = STOPPED)
- +- MetricsSparkPlan CommonDatabaseLink|DbaasDataLink|R
- +- Scan JDBCRelation(jdbc:oracle:thin:iot/oracle@//database:1521/orcl,(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE),[Lorg.apache.spark.Partition;@3f2ca11a,{url=jdbc:oracle:thin:iot/oracle@//database:1521/orcl, dbtable=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE), driver=oracle.jdbc.driver.OracleDriver, table-name=(SELECT id, name, type_id as type, registration_number as registrationNumber, vin, make, model, year, registration_time, last_reported_time, last_modified_time, last_modified_by as lastModifiedBy, status FROM FM_VEHICLE)})[ID#483,STATUS#495] PushedFilters: [EqualTo(STATUS,STOPPED)]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:80)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:164)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 30 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Final,isDistinct=false)], output=[entityId#368,entityMetrics#547])
- +- ConvertToSafe
- +- Sort [entityId#368 ASC], false, 0
- +- TungstenExchange hashpartitioning(entityId#368,200), None
- +- ConvertToUnsafe
- +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Partial,isDistinct=false)], output=[entityId#368,map#550])
- +- ConvertToSafe
- +- Sort [entityId#368 ASC], false, 0
- +- Project [entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366]
- +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- +- Scan oracle.kv.spark.KVStoreAvroRelation@453842ac[entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate.doExecute(SortBasedAggregate.scala:69)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Filter.doExecute(basicOperators.scala:70)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Project.doExecute(basicOperators.scala:46)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Sort.doExecute(Sort.scala:64)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.joins.SortMergeJoin.doExecute(SortMergeJoin.scala:70)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Project.doExecute(basicOperators.scala:46)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:86)
- at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:80)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 39 more
- Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
- TungstenExchange hashpartitioning(entityId#368,200), None
- +- ConvertToUnsafe
- +- SortBasedAggregate(key=[entityId#368], functions=[(MapMetricResults(id#365,paramsHash#366,numericValue#375,booleanValue#376,stringValue#377),mode=Partial,isDistinct=false)], output=[entityId#368,map#550])
- +- ConvertToSafe
- +- Sort [entityId#368 ASC], false, 0
- +- Project [entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366]
- +- MetricsSparkPlan FM_CommonOutput_NoSql_Link|NoSqlDataLink|R
- +- Scan oracle.kv.spark.KVStoreAvroRelation@453842ac[entityId#368,stringValue#377,numericValue#375,booleanValue#376,id#365,paramsHash#366] PushedFilters: [EqualTo(eventTime,0), EqualTo(paramsHash,a2770969-c827-30f2-910f-6179418462df), Or(EqualTo(id,e2668020-3b1f-3b61-8e9b-23d43fa65234),EqualTo(id,171bd431-7566-3414-ad41-315c46507fbf))]
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
- at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.Sort.doExecute(Sort.scala:64)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.ConvertToSafe.doExecute(rowFormatConverters.scala:56)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
- at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
- at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
- at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate$$anonfun$doExecute$1.apply(SortBasedAggregate.scala:72)
- at org.apache.spark.sql.execution.aggregate.SortBasedAggregate$$anonfun$doExecute$1.apply(SortBasedAggregate.scala:69)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 72 more
- Caused by: java.lang.IllegalArgumentException: Invalid KVStoreConfig. Request timeout: 45,000 ms exceeds socket read timeout: 30,000 ms
- at oracle.kv.KVStoreFactory.getStoreInternal(KVStoreFactory.java:159)
- at oracle.kv.KVStoreFactory.getStore(KVStoreFactory.java:122)
- at oracle.kv.KVStoreFactory.getStore(KVStoreFactory.java:59)
- at oracle.kv.spark.utils.ConnectionDef$$anonfun$createConnection$4.apply(ConnectionDef.scala:40)
- at oracle.kv.spark.utils.ConnectionDef$$anonfun$createConnection$4.apply(ConnectionDef.scala:40)
- at scala.util.Try$.apply(Try.scala:161)
- at oracle.kv.spark.utils.ConnectionDef.createConnection(ConnectionDef.scala:40)
- at oracle.kv.spark.rdd.KVStoreRDD.computePartitions(KVStoreRDD.scala:155)
- at oracle.kv.spark.rdd.KVStoreRDD.getPartitions(KVStoreRDD.scala:152)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.bacs.metrics.MetricsRDD.getPartitions(MetricsRDD.scala:33)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
- at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
- at org.apache.spark.ShuffleDependency.<init>(Dependency.scala:91)
- at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:220)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
- at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
- at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
- ... 90 more
- [2018-04-19T07:00:57.488Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl, appId: 0-AD, du: 1, message: Stopping analytics processor "CAS_DynamicProcessor"...
- [2018-04-19T07:00:57.489Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl, appId: 0-AD, du: 1, message: Analytics processor "CAS_DynamicProcessor" stopped.
- [2018-04-19T07:00:57.503Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.txflow.TxFlowManagerImpl, appId: 0-AD, du: 1, message: Analytics processors stopped.
- [2018-04-19T07:00:57.504Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.ApplicationManager, appId: 0-AD, du: 1, message: TxFlow manager has been asked to stop
- [2018-04-19T07:00:57.504Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.ApplicationManager, appId: 0-AD, du: 1, message: Stopping application manager
- [2018-04-19T07:00:57.505Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.JettyServerController, appId: 0-AD, du: 1, message: Stopping jetty controller
- [2018-04-19T07:00:57.582Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.Main, appId: 0-AD, du: 1, message: Application manager run finished
- [2018-04-19T07:00:57.590Z] oracle.IoT.ApplicationMonitor.Analytics INFO: className: com.oracle.bacs.bootstrap.metrics.AppMetricsRecorder, appId: 0-AD, du: 1, message: Stopping AppMetricsRecorder
- [2018-04-19T07:00:57.609Z] oracle.IoT.ApplicationMonitor.Analytics INFO: className: com.oracle.bacs.bootstrap.metrics.AppMetricsRecorder, appId: 0-AD, du: 1, message: AppMetricsRecorder stopped
- [2018-04-19T07:00:57.622Z] oracle.IoT.Bootstrap.Analytics INFO: className: com.oracle.bacs.bootstrap.application.utils.JavaSparkContextFactory, appId: 0-AD, du: 1, message: Closing JavaSparkContext "application_1524118654455_0004".
Add Comment
Please, Sign In to add comment