Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- public static void main(String[] args) throws InterruptedException {
- List<temp> listtmp = new ArrayList<>();
- SparkSession spark = SparkSession.builder()
- .master("local")
- .appName("MongoSparkConnectorIntro")
- .config("spark.mongodb.input.uri", "mongodb://127.0.0.1/temperature.templog")
- .config("spark.mongodb.output.uri", "mongodb://127.0.0.1/temperature.templog")
- .getOrCreate();
- // Create a JavaSparkContext using the SparkSession's SparkContext object
- JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
- /*Start Example: Read data from MongoDB************************/
- Dataset<Row> df = MongoSpark.load(jsc).toDF();
- /*End Example**************************************************/
- df.createOrReplaceTempView("temp");
- Dataset<Row> data = spark.sql("select * from temp");
- // Analyze data from MongoDB
- data.show();
- for(Row row : data.collectAsList()){
- listtmp.add(tempconvert.totemprow(row));
- }
- jsc.close();
- System.out.println(listtmp.size());
- }
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement