Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import java.net.URI
- import java.nio.ByteBuffer
- import org.apache.hadoop.fs.Path
- import org.apache.hadoop.yarn.api.ApplicationConstants
- import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
- import org.apache.hadoop.yarn.api.records._
- import org.apache.hadoop.yarn.client.api.YarnClient
- import org.apache.hadoop.yarn.conf.YarnConfiguration
- import org.apache.hadoop.yarn.util.Records
- import scala.collection.JavaConverters._
class Submit {

  private val yc = YarnClient.createYarnClient()

  /** Submits a short-lived YARN application whose ApplicationMaster merely
    * shells out to `spark-submit` and exits.
    *
    * @param resourceManagerUri base URI of the YARN ResourceManager; its
    *                           `/conf` endpoint is fetched (XML) to build
    *                           the Hadoop configuration.
    * @param appName            display name for the YARN application.
    * @param jar                application jar handed to `spark-submit`.
    * @param className          main class handed to `spark-submit`.
    * @return the numeric id of the submitted application. Of limited use:
    *         the application only executes the spark-submit command.
    */
  def submit(resourceManagerUri: URI,
             appName: String = "something" + System.currentTimeMillis(),
             jar: String = "s3n://thebucketofchoice/spark-application-sd.jar",
             className: String = "com.sirsidynix.Transform"): Int = {
    // Fetches the conf from a call to <resourceManagerUri>/conf. This is in XML.
    val conf = ConfParser.fetchHadoopConf(resourceManagerUri.toString)
    // Keep finished-container directories around for 10 h so failed launches
    // can be debugged on the NodeManager.
    conf.set("yarn.nodemanager.delete.debug-delay-sec", "36000")

    yc.init(new YarnConfiguration(conf))
    yc.start()

    val app = yc.createApplication()
    val appContext = app.getApplicationSubmissionContext
    appContext.setApplicationName(appName)
    appContext.setQueue("default")
    // 64 MB / 1 vcore is enough: the AM only runs a single shell command.
    appContext.setResource(Resource.newInstance(64, 1))
    // Previously `jar` and `className` were accepted but ignored (makeContainer
    // hard-coded both); they are now threaded through to the launch command.
    appContext.setAMContainerSpec(makeContainer(appName, className, jar))
    appContext.setApplicationType("IGNITOR")
    appContext.setApplicationTags(Set("IGNITOR").asJava)
    appContext.setMaxAppAttempts(1)

    yc.submitApplication(appContext)
    appContext.getApplicationId.getId
  }

  /** Staging directory for the given application, relative to the user's
    * home directory. NOTE(review): currently unreferenced in this file —
    * presumably used by a sibling class or kept for a planned upload step.
    */
  private def getAppStagingDir(appId: ApplicationId) =
    ".ignitorStaging" + Path.SEPARATOR + appId.toString() + Path.SEPARATOR

  /** Builds the AM container spec: one shell command that runs `spark-submit`
    * in yarn-cluster mode, redirecting stdout/stderr into the YARN log dir.
    *
    * @param name      base application name; the Spark app is named "$name:$mainClass".
    * @param mainClass main class passed to `spark-submit --class`.
    * @param appJar    jar passed to `spark-submit`.
    */
  private def makeContainer(name: String,
                            mainClass: String = "com.sirsidynix.Transform",
                            appJar: String = "s3n://thebucketofchoice/spark-application-sd.jar") = {
    val master = "yarn-cluster"
    val appName = s"$name:$mainClass"
    val command = Seq(
      s"""spark-submit --master $master --class $mainClass --name "$appName" $appJar""",
      s"1>${ApplicationConstants.LOG_DIR_EXPANSION_VAR }/stdout",
      s"2>${ApplicationConstants.LOG_DIR_EXPANSION_VAR }/stderr &"
    ).mkString(" ")
    println(s"$command")
    val container = Records.newRecord(classOf[ContainerLaunchContext])
    container.setCommands(List(command).asJava)
    println(s"${container.getCommands }")
    container
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement