Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- import org.apache.spark.sql.SQLContext
- import org.apache.spark.{SparkConf, SparkContext}
/** Two-member enumeration used below to exercise encoding of
  * `Enumeration#Value` fields in a Spark DataFrame.
  *
  * Members keep declaration order, so `Hello.id == 0` and `World.id == 1`.
  */
object MyEnum extends Enumeration {
  // Alias so callers can write `MyEnum.MyEnum` as the value type.
  type MyEnum = Value

  val Hello: Value = Value
  val World: Value = Value
}
/** Row type for the DataFrame round-trip test.
  *
  * @param field arbitrary string payload
  * @param other a member of [[MyEnum]]; NOTE(review): Spark SQL provides no
  *              built-in Encoder for `Enumeration#Value`, so deriving a
  *              DataFrame from this type may fail at runtime — confirm, and
  *              consider storing `other.toString` instead.
  */
final case class MyData(field: String, other: MyEnum.Value)
object EnumTest {
  /** Entry point: spins up a local Spark context, round-trips one [[MyData]]
    * row through a DataFrame, and prints the collected rows.
    *
    * NOTE(review): Spark SQL has no built-in Encoder for
    * `scala.Enumeration#Value`, so `.toDF()` may throw at runtime — confirm
    * against the Spark version in use.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("test").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)
    try {
      val sqlCtx = new SQLContext(sc)
      import sqlCtx.implicits._

      val df = sc.parallelize(Array(MyData("hello", MyEnum.World))).toDF()
      // Fixed: the original interpolation ended in `}}")`, which printed a
      // stray '}' after the row list.
      println(s"df: ${df.collect().mkString(",")}")
    } finally {
      // Always release the local SparkContext, even if toDF()/collect() throws.
      sc.stop()
    }
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement