Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
package Spark

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql._
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

/** Minimal example of building a DataFrame from local data using the
  * Spark 1.x `SQLContext` API (pre-`SparkSession`).
  *
  * Creates a single-column DataFrame of integers with an explicit schema,
  * then prints its schema and contents.
  */
object SparkDF1 {

  def main(args: Array[String]): Unit = {
    /* ------------------- SparkContext / SQLContext setup ------------------- */
    val sparkConf = new SparkConf().setMaster("local").setAppName("DF in 1.x")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    // Plain local data — NOT an RDD yet; it must be distributed with
    // sc.parallelize before it can back a DataFrame.
    val numbers = Array(1, 2, 3, 4, 5, 6, 7)

    // Explicit schema: one non-nullable integer column named "Number".
    val schema = StructType(
      StructField("Number", IntegerType, false) :: Nil
    )

    // BUG FIX: SQLContext.createDataFrame(rows, schema) requires an RDD[Row]
    // (or java.util.List[Row]). The original code mapped a local Array to
    // Array[Row] and passed that, which does not compile. Parallelize first,
    // then wrap each element in a Row.
    val rowRdd = sc.parallelize(numbers).map(n => Row(n))
    val df = sqlContext.createDataFrame(rowRdd, schema)

    df.printSchema()
    df.show()

    // Release cluster resources before the JVM exits.
    sc.stop()
  }
}
Add Comment
Please sign in to add a comment.