Hadoop single-machine pseudo-distributed cluster
-
Network setup
-
Under the /etc/sysconfig/network-scripts/ directory
ifcfg-eth0 file
BOOTPROTO="static"
DEFROUTE="yes"
IPADDR=192.168.1.10
GATEWAY=192.168.1.1
DNS1=192.168.1.1
NETMASK=255.255.255.0
ONBOOT="yes"
-
Under the /etc/ directory
hosts file
192.168.1.10 hadoop10
-
systemctl restart network
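A quick sanity check after restarting the network service (optional sketch; assumes the interface is eth0 and the hosts mapping above):
ip addr show eth0        # the interface should now show 192.168.1.10
ping -c 3 hadoop10       # the hostname should resolve via /etc/hosts and answer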
-
Set environment variables
vim /etc/profile
-
java
##java_home
JAVA_HOME=/opt/module/jdk1.8.0_151
JRE_HOME=$JAVA_HOME/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASS_PATH PATH
source /etc/profile
java -version    # test that Java is installed and on the PATH
-
hadoop
##hadoop_home
export HADOOP_HOME=/opt/module/hadoop-2.10.1
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
hadoop    # test: running the command with no arguments should print its usage help
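As an extra check (optional sketch), confirm the environment variables took effect after source /etc/profile:
which hadoop             # should point into /opt/module/hadoop-2.10.1/bin
hadoop version           # should report Hadoop 2.10.1
echo $HADOOP_HOME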
-
-
Edit the configuration files
-
hadoop/etc/hadoop/core-site.xml
<configuration>
    <!-- NameNode address -->
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://127.0.0.1:9000</value>
    </property>
    <!-- Base directory for Hadoop data and temporary files -->
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/opt/module/hadoop-2.10.1/data/temp</value>
    </property>
    <!-- Use root as the static user for the web UIs -->
    <property>
        <name>hadoop.http.staticuser.user</name>
        <value>root</value>
    </property>
    <!-- Disable HDFS permission checking -->
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>
</configuration>
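To confirm that these values are actually picked up from core-site.xml, hdfs getconf can query the merged configuration (a quick check, not part of the original notes):
hdfs getconf -confKey fs.defaultFS        # expect hdfs://127.0.0.1:9000
hdfs getconf -confKey hadoop.tmp.dir      # expect /opt/module/hadoop-2.10.1/data/temp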
-
hadoop/etc/hadoop/hdfs-site.xml
<configuration>
    <!-- Number of HDFS block replicas -->
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
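The same kind of check works for the replication factor (optional):
hdfs getconf -confKey dfs.replication     # expect 1 in this pseudo-distributed setup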
-
hadoop/etc/hadoop/yarn-site.xml
<configuration>
    <!-- Auxiliary shuffle service required by MapReduce -->
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <!-- Host running the ResourceManager -->
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>hadoop10</value>
    </property>
    <!-- Enable log aggregation -->
    <property>
        <name>yarn.log-aggregation-enable</name>
        <value>true</value>
    </property>
    <!-- Keep aggregated logs for 7 days (604800 seconds) -->
    <property>
        <name>yarn.log-aggregation-retain-seconds</name>
        <value>604800</value>
    </property>
</configuration>
-
hadoop/etc/hadoop/mapred-site.xml
<configuration>
    <!-- Run MapReduce jobs on YARN -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <!-- JobHistory server RPC address -->
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>hadoop10:10020</value>
    </property>
    <!-- JobHistory server web UI address -->
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>hadoop10:19888</value>
    </property>
</configuration>
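Before starting the daemons it can help to verify that all four edited files are well-formed XML; a sketch using xmllint (assumes libxml2/xmllint is installed):
cd /opt/module/hadoop-2.10.1/etc/hadoop
xmllint --noout core-site.xml hdfs-site.xml yarn-site.xml mapred-site.xml    # no output means no syntax errors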
-
-
Startup
hdfs namenode -format                                  # format the NameNode (first start only)
sbin/hadoop-daemon.sh start namenode
sbin/hadoop-daemon.sh start datanode
http://192.168.1.10:50070                              # open in a browser to check that HDFS is up
sbin/yarn-daemon.sh start resourcemanager
sbin/yarn-daemon.sh start nodemanager
sbin/mr-jobhistory-daemon.sh start historyserver
hdfs dfs -put /opt/module/hadoop-2.10.1/share/test.txt /input    # upload a file from the Linux filesystem into HDFS
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.10.1.jar wordcount /input /output
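To verify that the daemons came up and that the wordcount job produced output, a sketch (part-r-00000 is the usual reducer output name; adjust if it differs):
jps                                   # expect NameNode, DataNode, ResourceManager, NodeManager, JobHistoryServer
hdfs dfs -ls /input                   # the uploaded file
hdfs dfs -ls /output                  # _SUCCESS plus the reducer output
hdfs dfs -cat /output/part-r-00000    # the word counts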
-
Check
http://192.168.1.10:50070    HDFS (NameNode web UI)
http://192.168.1.10:8088    Hadoop cluster (YARN ResourceManager web UI)
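The same status can also be checked from the command line (optional sketch):
hdfs dfsadmin -report    # HDFS capacity and live DataNodes
yarn node -list          # NodeManagers registered with the ResourceManager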