Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import org.apache.spark.{SparkConf, SparkContext}
- import org.apache.spark.sql.SparkSession
- import org.apache.spark.mllib.linalg.Vectors
- import org.apache.spark.rdd.RDD
- import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}
object computeSxbph_3_30 {

  /**
   * For each key in a small sample data set, computes column statistics
   * (per-column max) over that key's rows using MLlib's `Statistics.colStats`.
   *
   * Bug fixed: the original called `sc.parallelize` inside an RDD
   * transformation (`topItem_set.map`). A `SparkContext` exists only on the
   * driver; referencing it from an executor-side closure (nested RDD
   * creation) is unsupported and aborts the job — this is the cause of the
   * `TaskSetManager: Task 2 in stage 11.0 failed` error in the pasted log.
   * The groups are now collected to the driver first, and the per-group RDD
   * is created driver-side. The rewrite also handles any group size (the
   * original hard-coded exactly three rows per key via `toList(0..2)`).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("computeSxbph_3_30")
      .setMaster("spark://192.168.41.143:7077")
    val sc = new SparkContext(conf)

    // Sample rows: a string key followed by three integer measurements.
    val input = sc.parallelize(List(
      List("001_2019_3_20", 1, 3, 5),
      List("001_2019_3_20", 4, 10, 15),
      List("001_2019_3_20", 41, 1203, 35),
      List("002_2019_3_20", 32, 4, 9),
      List("002_2019_3_20", 12, 24, 89),
      List("002_2019_3_20", 2, 5, 19),
      List("003_2019_3_20", 1, 1, 94),
      List("003_2019_3_20", 10, 91, 4),
      List("003_2019_3_20", 14, 21, 43)
    ), 3)

    // Parse each heterogeneous List[Any] row into (key, (v1, v2, v3)).
    // Int values widen to Double via the declared element type.
    val maped: RDD[(String, (Double, Double, Double))] = input.map { x =>
      val key = x(0).toString
      val v1 = x(1).toString.toInt
      val v2 = x(2).toString.toInt
      val v3 = x(3).toString.toInt
      (key, (v1, v2, v3))
    }

    // Group per key and bring the (small) groups back to the driver so the
    // SparkContext can legally be used again to build per-group RDDs.
    // NOTE(review): fine for this sample data; for large groups, prefer a
    // distributed element-wise reduceByKey instead of collect().
    val grouped: Array[(String, Iterable[(Double, Double, Double)])] =
      maped.groupByKey().collect()

    grouped.foreach { case (key, values) =>
      val vectors = values.map { case (a, b, c) => Vectors.dense(a, b, c) }.toSeq
      vectors.foreach(println(_))
      println("____分割线______")
      // colStats needs an RDD input; safe here because we are on the driver.
      val rows = sc.parallelize(vectors)
      val summary: MultivariateStatisticalSummary = Statistics.colStats(rows)
      // The original computed summary.max and discarded it; print it instead.
      println(s"$key max: ${summary.max}")
    }

    sc.stop()
  }
}
19/03/20 23:47:25 ERROR TaskSetManager: Task 2 in stage 11.0 failed 1 times; aborting job
org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 11.0 failed 1 times, most recent failure:
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement