The following Spring beans configure the MongoDB Spark Connector. Note that the SparkConf sets the input and output URIs and the database, but never names a collection:

@Bean
public SparkConf sparkConf() {
    return new SparkConf()
            .setMaster("local[*]")
            .setAppName("test")
            .set("spark.app.id", "test")
            // URIs and databases only; no collection is configured anywhere.
            .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.input.database", "myDataBase")
            .set("spark.mongodb.output.database", "myDataBase");
}

@Bean
public JavaSparkContext javaSparkContext() {
    return new JavaSparkContext(sparkConf());
}

@Bean
public SQLContext sqlContext() {
    return new SQLContext(SparkSession
            .builder()
            .appName("eat")
            .master("local[*]")
            .config(sparkConf())
            .getOrCreate());
}
The connector dependency; the artifact's Scala suffix (here 2.11) must match the Scala version of the Spark build:

<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.0.0</version>
</dependency>
The read builds a ReadConfig, but the option key below names the output collection, so the input collection is still unset:

ReadConfig readConfig = ReadConfig.create(sparkContext)
        .withOption("spark.mongodb.output.collection", "myCollection");
JavaRDD<Document> rdd = MongoSpark.load(sparkContext, readConfig);
- "Missing collection name. Set via the 'spark.mongodb.input.uri'
- or 'spark.mongodb.input.collection' property"
One fix is to name the database and collection directly in the connection URIs:

SparkConf conf = new SparkConf()
        .setMaster("local[*]")
        .setAppName("test")
        .set("spark.app.id", "test")
        // Format: mongodb://host/database.collection
        .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/myDataBase.myCollection")
        .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/myDataBase.myCollection");
Alternatively, keep a bare URI and set the database and collection as separate properties. A Spring Boot configuration that does this:

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.Bean;

@SpringBootConfiguration
public class SparkConfiguration {

    private final String MONGO_PREFIX = "mongodb://";
    private final String MONGO_INPUT_COLLECTION = "faqs";

    @Value(value = "${spring.data.mongodb.name}")
    private String mongoName;

    @Value(value = "${spring.data.mongodb.net.bindIp}")
    private String mongoHost;

    @Bean
    public SparkSession sparkSession() {
        return SparkSession.builder()
                .master("local[*]")
                .appName("eat-spark-cluster")
                .config("spark.app.id", "Eat")
                // Bare URI; database and collection are set explicitly below.
                .config("spark.mongodb.input.uri", MONGO_PREFIX.concat(mongoHost).concat("/"))
                .config("spark.mongodb.input.database", mongoName)
                .config("spark.mongodb.input.collection", MONGO_INPUT_COLLECTION)
                .getOrCreate();
    }

    @Bean
    public JavaSparkContext javaSparkContext() {
        return JavaSparkContext.fromSparkContext(sparkSession().sparkContext());
    }
}
A read can then still override the collection per call, using the short "collection" option key rather than the full property name:

// getJavaSparkContext() returns the injected JavaSparkContext bean.
ReadConfig readConfig = ReadConfig.create(getJavaSparkContext())
        .withOption("collection", "my_collection");
JavaMongoRDD<Document> placesRdd = MongoSpark.load(getJavaSparkContext(), readConfig);
return placesRdd.collect();
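
When only part of a collection is needed, the connector can also push an aggregation pipeline down to MongoDB before the documents reach Spark. A sketch on the placesRdd above (the "rating" field is an invented example):

import static java.util.Collections.singletonList;

// $match runs server-side in MongoDB, not in Spark.
JavaMongoRDD<Document> filtered = placesRdd.withPipeline(
        singletonList(Document.parse("{ $match: { rating: { $gt: 3 } } }")));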
A complete standalone example that configures the connection in plain Java and both reads and writes:

package mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;

public class Connector {

    String db1 = "mongodb://127.0.0.1/";
    String db2 = "mongodb://192.168.4.180/";
    String dbUrl = db1;
    String user = "";   // credentials unused against a local, unauthenticated server
    String pass = "";
    String dbName = "test";
    String collName = "spark";

    public static void main(String[] args) {
        Connector con = new Connector();
        JavaSparkContext jsc = con.connection();
        // con.writeToMongo(jsc);
        con.readFromMongo(jsc);
        Scanner sc = new Scanner(System.in);
        sc.next(); // block until input so the Spark UI stays reachable
    }

    JavaSparkContext connection() {
        SparkSession ss = SparkSession.builder()
                .master("local")
                .appName("MongoConnector")
                // Database in the URI, collection as an explicit property.
                .config("spark.mongodb.input.uri", dbUrl + dbName)
                .config("spark.mongodb.output.uri", dbUrl + dbName)
                .config("spark.mongodb.output.collection", collName)
                .config("spark.mongodb.input.collection", collName)
                .getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(ss.sparkContext());
        return jsc;
        // jsc.close();
    }

    void readFromMongo(JavaSparkContext jsc) {
        // Uses the input URI and collection from the session configuration.
        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
        System.out.print(rdd.collect());
    }

    void writeToMongo(JavaSparkContext jsc) {
        JavaRDD<Document> rdd = jsc.parallelize(Arrays.asList(1, 2, 3))
                .map(x -> Document.parse("{spark: " + x + "}"));
        Map<String, String> writeconf = new HashMap<>();
        writeconf.put("collection", "spark");
        writeconf.put("writeConcern.w", "majority");
        WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeconf);
        MongoSpark.save(rdd, writeConfig);
    }
}
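
The same data can also be queried with Spark SQL by converting the RDD into a Dataset with an inferred schema. A sketch assuming a JavaSparkContext jsc and a SparkSession ss configured as in the Connector above (the view name is arbitrary):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
Dataset<Row> df = rdd.toDF();               // schema inferred by sampling documents
df.createOrReplaceTempView("spark_docs");   // hypothetical view name
ss.sql("SELECT * FROM spark_docs WHERE spark > 1").show();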