Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
package Spark
import org.apache.spark.sql.{Row, SQLContext, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

/**
 * Minimal example of building a Spark DataFrame from an RDD of Ints
 * with an explicit schema, then printing its schema and contents.
 */
object SparkDF1 {

  /**
   * Entry point: creates a local SparkSession, wraps Ints 1..8 into Rows,
   * applies a single-column non-nullable schema, and shows the result.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    /********************************* SparkSession **********************************/
    // local[*]-style single-JVM session; getOrCreate reuses an existing one if present
    val ss = SparkSession.builder().master("local").getOrCreate()
    try {
      val rdd = ss.sparkContext.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8))

      // One non-nullable Int column named "Number"
      val schema = StructType(
        StructField("Number", IntegerType, nullable = false) :: Nil
      )

      // createDataFrame(RDD[Row], schema) requires each element wrapped in a Row
      val row = rdd.map(x => Row(x))
      val df = ss.createDataFrame(row, schema)

      df.printSchema()
      df.show()
    } finally {
      // Release the SparkContext and its resources (original leaked the session)
      ss.stop()
    }
  }
}
Add Comment
Please, Sign In to add comment