package org.dongchimi.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SimpleDataApp {
    public static void main(String[] args) {

        // Basic Spark configuration.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("Simple Application");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Session configuration.
        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark SQL basic example")
                .config("spark.sql.shuffle.partitions", 6)
                .getOrCreate();

        // Database and table configuration.
        String url = "jdbc:mysql://yourHostname:3306/yourDatabaseName";
        Dataset<Row> load = spark
                .read()
                .format("jdbc")
                .option("driver", "com.mysql.jdbc.Driver")
                .option("url", url)
                .option("user", "yourUserName")
                .option("password", "yourPassword")
                .option("dbtable", "yourTableName")
                .load();

        load.show();
        // Dataset<Row> count = load.groupBy("RECORD_DATE").count().orderBy("count");
        // count.show();
    }
}
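For comparison with the commented-out DataFrame aggregation above, the same count-per-date query can be expressed in SQL by registering the loaded table as a temporary view. This is a minimal sketch meant to go inside main() after load.show(); it assumes the RECORD_DATE column referenced in the commented-out example, and the view name "records" is an arbitrary placeholder.

        // Register the JDBC-backed Dataset as a temporary view so it can be queried with Spark SQL.
        load.createOrReplaceTempView("records");

        // Equivalent of load.groupBy("RECORD_DATE").count().orderBy("count"), expressed as SQL.
        Dataset<Row> countsByDate = spark.sql(
                "SELECT RECORD_DATE, COUNT(*) AS count FROM records GROUP BY RECORD_DATE ORDER BY count");
        countsByDate.show();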