Guest User

Untitled

a guest
May 10th, 2018
256
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 6.08 KB | None | 0 0
  1. wanderer@wanderer-Lenovo-IdeaPad-S510p:~$ su - hduse
  2. Password:
  3.  
  4. hduse@wanderer-Lenovo-IdeaPad-S510p:~$ cd /usr/local/hadoop/sbin
  5.  
  6. hduse@wanderer-Lenovo-IdeaPad-S510p:/usr/local/hadoop/sbin$ start-all.sh
  7.  
  8. This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh
  9. Starting namenodes on [localhost]
  10. hduse@localhost's password:
  11. localhost: starting namenode, logging to /usr/local/hadoop/logs/hadoop-hduse-namenode-wanderer-Lenovo-IdeaPad-S510p.out
  12. hduse@localhost's password:
  13. localhost: starting datanode, logging to /usr/local/hadoop/logs/hadoop-hduse-datanode-wanderer-Lenovo-IdeaPad-S510p.out
  14. Starting secondary namenodes [0.0.0.0]
  15. hduse@0.0.0.0's password:
  16. 0.0.0.0: starting secondarynamenode, logging to /usr/local/hadoop/logs/hadoop-hduse-secondarynamenode-wanderer-Lenovo-IdeaPad-S510p.out
  17. starting yarn daemons
  18. starting resourcemanager, logging to /usr/local/hadoop/logs/yarn-hduse-resourcemanager-wanderer-Lenovo-IdeaPad-S510p.out
  19. hduse@localhost's password:
  20. localhost: starting nodemanager, logging to /usr/local/hadoop/logs/yarn-hduse-nodemanager-wanderer-Lenovo-IdeaPad-S510p.out
  21.  
  22. hduse@wanderer-Lenovo-IdeaPad-S510p:/usr/local/hadoop/sbin$ jps
  23. 7940 Jps
  24. 7545 ResourceManager
  25. 7885 NodeManager
  26.  
  27. hduse@wanderer-Lenovo-IdeaPad-S510p:/usr/local/hadoop/sbin$ stop-all.sh
  28. This script is Deprecated. Instead use stop-dfs.sh and stop-yarn.sh
  29. Stopping namenodes on [localhost]
  30. hduse@localhost's password:
  31. localhost: no namenode to stop
  32. hduse@localhost's password:
  33. localhost: no datanode to stop
  34. Stopping secondary namenodes [0.0.0.0]
  35. hduse@0.0.0.0's password:
  36. 0.0.0.0: no secondarynamenode to stop
  37. stopping yarn daemons
  38. stopping resourcemanager
  39. hduse@localhost's password:
  40. localhost: stopping nodemanager
  41. no proxyserver to stop
  42.  
  43. vi ~/.bashrc
  44.  
  45. #HADOOP VARIABLES START
  46. export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
  47. export HADOOP_INSTALL=/usr/local/hadoop
  48. export PATH=$PATH:$HADOOP_INSTALL/bin
  49. export PATH=$PATH:$HADOOP_INSTALL/sbin
  50. export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
  51. export HADOOP_COMMON_HOME=$HADOOP_INSTALL
  52. export HADOOP_HDFS_HOME=$HADOOP_INSTALL
  53. export YARN_HOME=$HADOOP_INSTALL
  54. export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
  55. export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
  56. export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
  57. export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
  58. #HADOOP VARIABLES END
  59.  
  60. vi /usr/local/hadoop/etc/hadoop/hdfs-site.xml
  61.  
  62. <configuration>
  63. <property>
  64. <name>dfs.replication</name>
  65. <value>1</value>
  66. <description>Default block replication.
  67. The actual number of replications can be specified when the file is created.
  68. The default is used if replication is not specified in create time.
  69. </description>
  70. </property>
  71. <property>
  72. <name>dfs.namenode.name.dir</name>
  73. <value>file:/usr/local/hadoop_store/hdfs/namenode</value>
  74. </property>
  75. <property>
  76. <name>dfs.datanode.data.dir</name>
  77. <value>file:/usr/local/hadoop_store/hdfs/datanode</value>
  78. </property>
  79. </configuration>
  80.  
  81. vi /usr/local/hadoop/etc/hadoop/hadoop-env.sh
  82.  
  83. export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
  84. export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
  85.  
  86. for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
  87. if [ "$HADOOP_CLASSPATH" ]; then
  88. export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  89. else
  90. export HADOOP_CLASSPATH=$f
  91. fi
  92. done
  93.  
  94. export HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
  95. export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
  96. export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
  97.  
  98. export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
  99.  
  100. export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
  101. export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"
  102.  
  103. # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
  104. export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
  105. export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
  106.  
  107. export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
  108. export HADOOP_PID_DIR=${HADOOP_PID_DIR}
  109. export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
  110.  
  111. # A string representing this instance of hadoop. $USER by default.
  112. export HADOOP_IDENT_STRING=$USER
  113.  
  114. vi /usr/local/hadoop/etc/hadoop/core-site.xml
  115. <configuration>
  116. <property>
  117. <name>hadoop.tmp.dir</name>
  118. <value>/app/hadoop/tmp</value>
  119. <description>A base for other temporary directories.</description>
  120. </property>
  121.  
  122. <property>
  123. <name>fs.default.name</name>
  124. <value>hdfs://localhost:54310</value>
  125. <description>The name of the default file system. A URI whose
  126. scheme and authority determine the FileSystem implementation. The
  127. uri's scheme determines the config property (fs.SCHEME.impl) naming
  128. the FileSystem implementation class. The uri's authority is used to
  129. determine the host, port, etc. for a filesystem.</description>
  130. </property>
  131. </configuration>
  132.  
  133. vi /usr/local/hadoop/etc/hadoop/mapred-site.xml
  134. <configuration>
  135. <property>
  136. <name>mapred.job.tracker</name>
  137. <value>localhost:54311</value>
  138. <description>The host and port that the MapReduce job tracker runs
  139. at. If "local", then jobs are run in-process as a single map
  140. and reduce task.
  141. </description>
  142. </property>
  143. </configuration>
  144.  
  145. javac 1.8.0_66
  146.  
  147. java version "1.8.0_66"
  148. Java(TM) SE Runtime Environment (build 1.8.0_66-b17)
  149. Java HotSpot(TM) 64-Bit Server VM (build 25.66-b17, mixed mode)
  150.  
  151. localhost: starting namenode, logging to /usr/local/hadoop/logs/hadoop-hduse-namenode-wanderer-Lenovo-IdeaPad-S510p.out
  152. localhost: starting datanode, logging to /usr/local/hadoop/logs/hadoop-hduse-datanode-wanderer-Lenovo-IdeaPad-S510p.out
  153. 0.0.0.0: starting secondarynamenode, logging to /usr/local/hadoop/logs/hadoop-hduse-secondarynamenode-wanderer-Lenovo-IdeaPad-S510p.out
  154. starting resourcemanager, logging to /usr/local/hadoop/logs/yarn-hduse-resourcemanager-wanderer-Lenovo-IdeaPad-S510p.out
  155. localhost: starting nodemanager, logging to /usr/local/hadoop/logs/yarn-hduse-nodemanager-wanderer-Lenovo-IdeaPad-S510p.out
Add Comment
Please, Sign In to add comment