package Spark

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.apache.spark.sql._
import org.apache.spark.{SparkConf, SparkContext}

object SparkDF1 {
  def main(args: Array[String]): Unit = {
    /********************************* SparkContext **********************************/
    // Spark 1.x entry points: a SparkContext plus a SQLContext built on top of it.
    val sConf = new SparkConf().setMaster("local").setAppName("DF in 1.x")
    val sc = new SparkContext(sConf)
    val ssql = new SQLContext(sc)

    // Parallelize the local array into an RDD; createDataFrame expects an RDD[Row],
    // not a plain Scala Array.
    val rdd = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7))

    // Schema with a single non-nullable integer column.
    val schema = StructType(
      StructField("Number", IntegerType, false) :: Nil
    )

    // Wrap each value in a Row and build the DataFrame from the RDD and schema.
    val rowRDD = rdd.map(x => Row(x))
    val df = ssql.createDataFrame(rowRDD, schema)

    df.printSchema()
    df.show()
  }
}
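
With the parallelize fix above, running the program locally should print output close to the following sketch (not captured from an actual run):

root
 |-- Number: integer (nullable = false)

+------+
|Number|
+------+
|     1|
|     2|
|     3|
|     4|
|     5|
|     6|
|     7|
+------+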