#include "stdafx.h"
#include <windows.h>   // GetEnvironmentVariable
#include <tchar.h>     // _tmain, _tprintf, TCHAR
#include <stdio.h>
#include <stdlib.h>    // malloc, free

#define BUFSIZE 4096
#define VARNAME TEXT("MAHOUT_CP")

// Print the value of MAHOUT_CP to verify the classpath variable is set.
int _tmain(int argc, _TCHAR* argv[]) {
    LPTSTR pszBuffer = (LPTSTR)malloc(BUFSIZE * sizeof(TCHAR));
    DWORD dwLength = GetEnvironmentVariable(VARNAME, pszBuffer, BUFSIZE);
    if (dwLength > 0 && dwLength < BUFSIZE) {   // set, and it fit in the buffer
        _tprintf(TEXT("%s\n"), pszBuffer);
        free(pszBuffer);
        return 0;
    }
    free(pszBuffer);   // variable unset or buffer too small
    return 1;
}

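The same check is quicker from Python when a compiler is not handy (a one-liner sketch; uses only the standard library):

import os
print(os.environ.get("MAHOUT_CP", "MAHOUT_CP is not set"))
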
set SCALA_HOME=C:\Progra~2\scala
set SPARK_HOME=C:\spark
set HADOOP_HOME=C:\hadoop
set MAHOUT_HOME=C:\mahout
set SPARK_SCALA_VERSION=2.10
set MASTER=local[2]
set MAHOUT_LOCAL=true
set path=%SCALA_HOME%\bin;%SPARK_HOME%\bin;%PATH%
cd /D %SPARK_HOME%
set SPARK_CP=%SPARK_HOME%\conf;%SPARK_HOME%\lib\xxx.jar;...other jars...
set MAHOUT_CP=%MAHOUT_HOME%\lib\xxx.jar;...other jars...;%MAHOUT_HOME%\xxx.jar;...other jars...;%SPARK_CP%;%MAHOUT_HOME%\lib\sparkxxx.jar;%MAHOUT_HOME%\lib\hadoopxxx.jar;%MAHOUT_HOME%\src\conf;%JAVA_HOME%\lib\tools.jar
start "master0" "%JAVA_HOME%\bin\java" -cp "%SPARK_CP%" -Xms1g -Xmx1g org.apache.spark.deploy.master.Master --ip localhost --port 7077 --webui-port 8082 >>out-master0.log 2>>out-master0.err
start "worker1" "%JAVA_HOME%\bin\java" -cp "%SPARK_CP%" -Xms1g -Xmx1g org.apache.spark.deploy.worker.Worker spark://localhost:7077 --webui-port 8083 >>out-worker1.log 2>>out-worker1.err
...you may add more workers here...
cd /D %MAHOUT_HOME%
"%JAVA_HOME%\bin\java" -Xmx4g -classpath "%MAHOUT_CP%" "org.apache.mahout.sparkbindings.shell.Main"

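Before attaching workers or the Mahout shell, it can help to confirm the master came up (a sketch; port 8082 matches the --webui-port passed above):

import urllib.request
with urllib.request.urlopen("http://localhost:8082") as resp:
    print("Spark master web UI answered with HTTP", resp.status)
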
https://mahout.apache.org/users/sparkbindings/play-with-shell.html

"C:\Program Files (x86)\Google\Chrome\Application\chrome" --disable-web-security http://localhost:4040

mkdir C:\tmp\hive

%HADOOP_HOME%\bin\winutils.exe chmod 777 /tmp/hive

%SPARK_HOME%\bin\spark-shell

import os
import sys

spark_home = r'C:\Apache\spark-1.6.1'

# Make the bundled PySpark and Py4J importable:
sys.path.insert(0, os.path.join(spark_home, 'python'))
sys.path.insert(0, os.path.join(spark_home, 'python', 'lib', 'pyspark.zip'))
sys.path.insert(0, os.path.join(spark_home, 'python', 'lib', 'py4j-0.9-src.zip'))

import pyspark

# Start a Spark context:
sc = pyspark.SparkContext()

# Read the Spark README and keep only the lines mentioning Python:
lines = sc.textFile(os.path.join(spark_home, "README.md"))
pythonLines = lines.filter(lambda line: "Python" in line)
pythonLines.first()

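A quick follow-up (a sketch, assuming the context above started cleanly) counts the matches and releases the context:

print(pythonLines.count())   # number of README lines mentioning "Python"
sc.stop()                    # shut the local context down when finished
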
C:\Users\DesktopA\spark\bin>spark-shell

C:\Users\DesktopA\spark\bin>pyspark

pip install findspark

import findspark
findspark.init()   # locates SPARK_HOME and patches sys.path for you

from pyspark import SparkContext
from pyspark import SparkConf
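
The two imports are typically followed by building a context, e.g. (a minimal sketch; the app name and local[2] master are placeholder choices):

conf = SparkConf().setAppName("findspark-smoke-test").setMaster("local[2]")
sc = SparkContext(conf=conf)
print(sc.parallelize(range(100)).sum())   # trivial job: should print 4950
sc.stop()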