Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- Configuration config = new Configuration();
- config.set("hdp.version", "2.2.4.2-2");
- // fill properties from core-site.xml and yarn-site.xml to read data from hdfs
- fillProperties(config, getPropXmlAsMap("/etc/hadoop/conf/core-site.xml"));
- fillProperties(config, getPropXmlAsMap("/etc/hadoop/conf/yarn-site.xml"));
- // spark configuration
- SparkConf sparkConf = new SparkConf();
- sparkConf.setMaster("yarn-cluster");
- sparkConf.set("hdp.version", "2.2.4.2-2");
- // run args
- List<String> runArgs = Arrays.asList(
- "--num-executors", "2",
- "--executor-memory", "300M",
- "--executor-cores", "1",
- "--class", "job_class_name",
- "--addJars", "hdfs://host:port/user/hdfs/spark-assembly-1.2.1.2.2.4.2-2-hadoop2.6.0.2.2.4.2-2.jar", // previously uploaded
- "--jar", "hdfs://host:port/user/hdfs/application.jar",
- "--arg", ".." // arg that passed to main method
- );
- // args and run
- ClientArguments args = new ClientArguments(runArgs.toArray(new String[runArgs.size()]), sparkConf);
- Client client = new Client(args, config, sparkConf);
- ApplicationId applicationId = client.submitApplication();
Advertisement
Add Comment
Please sign in to add a comment
Advertisement