Advertisement
Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
- Exception in thread "main" org.apache.spark.sql.AnalysisException: undefined function countDistinct;
- at org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry$$anonfun$2.apply(FunctionRegistry.scala:61)
- at org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry$$anonfun$2.apply(FunctionRegistry.scala:61)
- at scala.Option.getOrElse(Option.scala:120)
- at org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry.lookupFunction(FunctionRegistry.scala:60)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10$$anonfun$applyOrElse$5$$anonfun$applyOrElse$24.apply(Analyzer.scala:506)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10$$anonfun$applyOrElse$5$$anonfun$applyOrElse$24.apply(Analyzer.scala:506)
- at org.apache.spark.sql.catalyst.analysis.package$.withPosition(package.scala:48)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10$$anonfun$applyOrElse$5.applyOrElse(Analyzer.scala:505)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10$$anonfun$applyOrElse$5.applyOrElse(Analyzer.scala:502)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:227)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:227)
- at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:226)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:249)
- at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
- at scala.collection.Iterator$class.foreach(Iterator.scala:727)
- at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
- at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
- at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
- at scala.collection.AbstractIterator.to(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
- at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
- at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:279)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:249)
- at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
- at scala.collection.Iterator$class.foreach(Iterator.scala:727)
- at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
- at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
- at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
- at scala.collection.AbstractIterator.to(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
- at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
- at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:279)
- at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:232)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionDown$1(QueryPlan.scala:75)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:85)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1$1.apply(QueryPlan.scala:89)
- at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
- at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
- at scala.collection.immutable.List.foreach(List.scala:318)
- at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
- at scala.collection.AbstractTraversable.map(Traversable.scala:105)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:89)
- at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1.apply(QueryPlan.scala:93)
- at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
- at scala.collection.Iterator$class.foreach(Iterator.scala:727)
- at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
- at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
- at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
- at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
- at scala.collection.AbstractIterator.to(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
- at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
- at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
- at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsDown(QueryPlan.scala:93)
- at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressions(QueryPlan.scala:64)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10.applyOrElse(Analyzer.scala:502)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$$anonfun$apply$10.applyOrElse(Analyzer.scala:500)
- at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57)
- at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57)
- at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
- at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:56)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$.apply(Analyzer.scala:500)
- at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveFunctions$.apply(Analyzer.scala:499)
- at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:83)
- at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:80)
- at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)
- at scala.collection.immutable.List.foldLeft(List.scala:84)
- at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:80)
- at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:72)
- at scala.collection.immutable.List.foreach(List.scala:318)
- at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:72)
- at org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:916)
- at org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:916)
- at org.apache.spark.sql.SQLContext$QueryExecution.assertAnalyzed(SQLContext.scala:914)
- at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:132)
- at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:51)
- at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:725)
- at main.scala.query.SparkSqlQuery.run(SparkSqlQuery.scala:15)
- at main.scala.operator.string.Bootstrap.<init>(Bootstrap.scala:36)
- at main.scala.operator.string.Concat$.main(Concat.scala:17)
- at main.scala.operator.string.Concat.main(Concat.scala)
Advertisement
Add Comment
Please sign in to add a comment
Advertisement