Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- Exception in thread "main" org.apache.spark.sql.AnalysisException: cannot resolve '`probability`' given input columns: [parch, key, name_2-stagesApplied_OPVector_00000000000f, age_1-stagesApplied_OPVector_00000000000d, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019, sex, sibSp, name_4-stagesApplied_OPVector_000000000011, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013, embarked, name_2-stagesApplied_OPVector_000000000010, name, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c, cabin, survived, age, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a, name_1-stagesApplied_TextList_00000000000e, pClass, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_9-stagesApplied_RealNN_00000000001b, ticket];;
- 'Project ['probability, cast(survived#1067 as double) AS survived#1314]
- +- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a#1214, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_9-stagesApplied_RealNN_00000000001b#1237]
- +- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_11-stagesApplied_OPVector_00000000001a#1214]
- +- Project [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076, UDF(age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076) AS age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_10-stagesApplied_OPVector_000000000019#1192]
- +- LogicalRDD [key#1058, age#1059, cabin#1060, embarked#1061, name#1062, pClass#1063, parch#1064, sex#1065, sibSp#1066, survived#1067, ticket#1068, name_1-stagesApplied_TextList_00000000000e#1069, name_2-stagesApplied_OPVector_00000000000f#1070, name_2-stagesApplied_OPVector_000000000010#1071, age_1-stagesApplied_OPVector_00000000000d#1072, cabin-embarked-pClass-parch-sex-sibSp-ticket_1-stagesApplied_OPVector_00000000000c#1073, name_4-stagesApplied_OPVector_000000000011#1074, age-cabin-embarked-name-pClass-parch-sex-sibSp-ticket_7-stagesApplied_OPVector_000000000012#1075, age-cabin-embarked-name-pClass-parch-sex-sibSp-survived-ticket_8-stagesApplied_OPVector_000000000013#1076]
- at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
- at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:88)
- at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:85)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289)
- at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:288)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpression$1(QueryPlan.scala:279)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:289)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1$1.apply(QueryPlan.scala:293)
- at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
- at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
- at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
- at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
- at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
- at scala.collection.AbstractTraversable.map(Traversable.scala:104)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:293)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$6.apply(QueryPlan.scala:298)
- at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.mapExpressions(QueryPlan.scala:298)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsUp(QueryPlan.scala:268)
- at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:85)
- at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:78)
- at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:127)
- at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:78)
- at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:91)
- at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:52)
- at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:67)
- at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:2884)
- at org.apache.spark.sql.Dataset.select(Dataset.scala:1150)
- at com.salesforce.op.evaluators.OpBinaryClassificationEvaluator.evaluateAll(OpBinaryClassificationEvaluator.scala:104)
- at com.salesforce.op.evaluators.OpBinaryClassificationEvaluator.evaluateAll(OpBinaryClassificationEvaluator.scala:54)
- at com.salesforce.op.OpWorkflowModel.com$salesforce$op$OpWorkflowModel$$saveScores(OpWorkflowModel.scala:388)
- at com.salesforce.op.OpWorkflowModel$$anonfun$scoreFn$2.apply(OpWorkflowModel.scala:347)
- at com.salesforce.op.OpWorkflowModel$$anonfun$scoreFn$2.apply(OpWorkflowModel.scala:339)
- at com.salesforce.op.OpWorkflowModel.scoreAndEvaluate(OpWorkflowModel.scala:306)
- at com.salesforce.op.OpWorkflowModel.evaluate(OpWorkflowModel.scala:321)
- at com.salesforce.op.OpWorkflowRunner.evaluate(OpWorkflowRunner.scala:280)
- at com.salesforce.op.OpWorkflowRunner.run(OpWorkflowRunner.scala:311)
- at com.salesforce.op.OpAppWithRunner.run(OpApp.scala:211)
- at com.salesforce.op.OpApp.main(OpApp.scala:182)
- at com.salesforce.hw.titanic.OpTitanic.main(OpTitanic.scala)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
- at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
- at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
- at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
- at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
- 18/09/20 03:09:04 INFO OpWorkflowRunner: Total run time: 15.411s
- FAILURE: Build failed with an exception.
- * What went wrong:
- Execution failed for task ':sparkSubmit'.
- > Process 'command '/Users/yodha34/spark-2.2.1-bin-hadoop2.7/bin/spark-submit'' finished with non-zero exit value 1
- * Try:
- Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
- * Get more help at https://help.gradle.org
- BUILD FAILED in 28s
Add Comment
Please sign in to add a comment