Exception in thread "main" org.apache.spark.sql.AnalysisException: cannot resolve '`probability`' given input columns: [parch, key, name_2-stagesApplied_OPVector_00000000000f, age_1-stagesApplied_OPVector_00000000000d, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019, sex, sibSp, name_4-stagesApplied_OPVector_000000000011, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013, embarked, name_2-stagesApplied_OPVector_000000000010, name, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c, cabin, survived, age, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a, name_1-stagesApplied_TextList_00000000000e, pClass, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_9-stagesApplied_RealNN_00000000001b, ticket];;
'Project ['probability, cast(survived#1067 as double) AS survived#1314]
+- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a#1214, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_9-stagesApplied_RealNN_00000000001b#1237]
   +- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a#1214]
      +- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192]
         +- LogicalRDD [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076]

    at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:88)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:85)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:288)
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268)
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268)
    at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpression$1(QueryPlan.scala:279)
    at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:289)
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1$1.apply(QueryPlan.scala:293)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:293)
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$6.apply(QueryPlan.scala:298)
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
    at org.apache.spark.sql.catalyst.plans.QueryPlan.mapExpressions(QueryPlan.scala:298)
    at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsUp(QueryPlan.scala:268)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:85)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:78)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:127)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:78)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:91)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:52)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:67)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:2884)
    at org.apache.spark.sql.Dataset.select(Dataset.scala:1150)
    at com.salesforce.op.evaluators.OpBinaryClassificationEvaluator.evaluateAll(OpBinaryClassificationEvaluator.scala:104)
    at com.salesforce.op.evaluators.OpBinaryClassificationEvaluator.evaluateAll(OpBinaryClassificationEvaluator.scala:54)
    at com.salesforce.op.OpWorkflowModel.com$salesforce$op$OpWorkflowModel$$saveScores(OpWorkflowModel.scala:388)
    at com.salesforce.op.OpWorkflowModel$$anonfun$scoreFn$2.apply(OpWorkflowModel.scala:347)
    at com.salesforce.op.OpWorkflowModel$$anonfun$scoreFn$2.apply(OpWorkflowModel.scala:339)
    at com.salesforce.op.OpWorkflowModel.scoreAndEvaluate(OpWorkflowModel.scala:306)
    at com.salesforce.op.OpWorkflowModel.evaluate(OpWorkflowModel.scala:321)
    at com.salesforce.op.OpWorkflowRunner.evaluate(OpWorkflowRunner.scala:280)
    at com.salesforce.op.OpWorkflowRunner.run(OpWorkflowRunner.scala:311)
    at com.salesforce.op.OpAppWithRunner.run(OpApp.scala:211)
    at com.salesforce.op.OpApp.main(OpApp.scala:182)
    at com.salesforce.hw.titanic.OpTitanic.main(OpTitanic.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/09/20 03:09:04 INFO OpWorkflowRunner: Total run time: 15.411s

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':sparkSubmit'.
> Process 'command '/Users/yodha34/spark-2.2.1-bin-hadoop2.7/bin/spark-submit'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

BUILD FAILED in 28s
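
For context, the AnalysisException above is raised because OpBinaryClassificationEvaluator selects a "probability" column that is not present in the scored DataFrame (its input columns are the Titanic features plus workflow stage outputs). The following is a minimal, self-contained sketch, not the TransmogrifAI or OpTitanic source, using toy names (MissingProbabilityColumn, "scored", "rawScore") to show how the same class of Spark error arises when select() references a column missing from the schema:

// Minimal illustrative sketch (toy data and names; hypothetical, not the author's code).
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object MissingProbabilityColumn {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("missing-column-repro")
      .getOrCreate()
    import spark.implicits._

    // Toy scored frame: it has a label column but no "probability" column,
    // analogous to the scored Titanic DataFrame described in the trace above.
    val scored = Seq((1.0, 0.3), (0.0, 0.9)).toDF("survived", "rawScore")

    // This select fails analysis the same way the evaluator's select does:
    // org.apache.spark.sql.AnalysisException: cannot resolve '`probability`'
    // given input columns: [survived, rawScore]
    scored.select(col("probability"), col("survived").cast("double")).show()

    spark.stop()
  }
}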