package Spark

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

object SparkDF1 {
  def main(args: Array[String]): Unit = {
    /********************************* SparkSession **********************************/
    val ss = SparkSession.builder().appName("SparkDF1").master("local").getOrCreate()

    // Distribute the numbers 1 through 8 as an RDD[Int].
    val rdd = ss.sparkContext.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8))

    // Explicit schema: a single non-nullable integer column named "Number".
    val schema = StructType(
      StructField("Number", IntegerType, nullable = false) :: Nil
    )

    // Wrap each element in a Row so it matches the schema, then build the DataFrame.
    val rows = rdd.map(x => Row(x))
    val df = ss.createDataFrame(rows, schema)

    df.printSchema()
    df.show()

    ss.stop()
  }
}
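A minimal alternative sketch (assuming the same SparkSession ss as above): the explicit Row/StructType construction can be skipped by importing ss.implicits._ and calling toDF, which derives an integer column from the RDD's element type.

    import ss.implicits._

    // Let Spark infer the schema from RDD[Int] instead of building it by hand.
    val df2 = ss.sparkContext.parallelize(1 to 8).toDF("Number")
    df2.printSchema()
    df2.show()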