Guest User

Untitled

a guest
Oct 15th, 2018
77
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 0.84 KB | None | 0 0
  1. > sc <- spark_connect(master = "yarn-client", config = conf, version = '2.2.0')
  2.  
  3. Sys.setenv(SPARK_HOME="/usr/lib/spark")
  4. options(rsparkling.sparklingwater.version = '2.0.3')
  5. # Configure cluster (c3.4xlarge 30G 16core 320disk)
  6. conf <- spark_config()
  7. conf$'sparklyr.shell.executor-memory' <- "20g"
  8. conf$'sparklyr.shell.driver-memory' <- "20g"
  9. conf$spark.executor.cores <- 16
  10. conf$spark.executor.memory <- "20G"
  11. conf$spark.yarn.am.cores <- 16
  12. conf$spark.yarn.am.memory <- "20G"
  13. conf$spark.executor.instances <- 8
  14. conf$spark.dynamicAllocation.enabled <- "false"
  15. conf$maximizeResourceAllocation <- "true"
  16. conf$spark.default.parallelism <- 32
  17. # Connect to cluster
  18. sc <- spark_connect(master = "yarn-client", config = conf, version = '2.2.0') # error shows after executing this line
  19.  
  20. C:\spark\spark-2.3.1-bin-hadoop2.7
  21.  
  22. C:\spark\spark-2.3.1-bin-hadoop2.7
Add Comment
Please, Sign In to add comment