Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
- package org.dongchimi.spark;
- import org.apache.spark.SparkConf;
- import org.apache.spark.api.java.JavaSparkContext;
- import org.apache.spark.sql.Dataset;
- import org.apache.spark.sql.Row;
- import org.apache.spark.sql.SparkSession;
- public class SimpleDataApp {
- public static void main(String[] args) {
- // Spark 기본 설정.
- SparkConf conf = new SparkConf().setMaster("local").setAppName("Simple Application");
- JavaSparkContext sc = new JavaSparkContext(conf);
- // session 설정.
- SparkSession spark = SparkSession
- .builder()
- .appName("Java Spark SQL basic example")
- .config("spark.sql.shuffle.partitions", 6)
- .getOrCreate();
- // db 및 테이블 설정
- String url = "jdbc:mysql://yourHostname:3306/yourDatabaseName";
- Dataset<Row> load = spark
- .read()
- .format("jdbc")
- .option("driver", "com.mysql.jdbc.Driver")
- .option("url", url)
- .option("user", "yourUserName")
- .option("password", "'yourPassword")
- .option("dbtable", "yourTableName")
- .load();
- load.show();
- // Dataset<Row> count = load.groupBy("RECORD_DATE").count().orderBy("count");
- // count.show();
- }
- }
Add Comment
Please sign in to add a comment.