Hadoop Single-Node Deployment

1. Install Java
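Hadoop 3.2 requires a Java 8 JDK; the hadoop-env.sh edit below points at /usr/share/dev/jdk8, so install a JDK and note its path. A minimal sketch using OpenJDK 8, assuming a Debian/Ubuntu host (the package name and install path are assumptions, adjust for your distro or vendor JDK):

# install OpenJDK 8 and confirm it is on the PATH (apt-based distro assumed)
sudo apt-get install -y openjdk-8-jdk
java -version
# note the JDK location for JAVA_HOME, e.g. /usr/lib/jvm/java-8-openjdk-amd64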
2. Download Hadoop
wget http://mirrors.hust.edu.cn/apache/hadoop/common/stable2/hadoop-3.2.1.tar.gz
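Optionally verify the tarball against the checksum published by Apache; a sketch, assuming the .sha512 file is fetched from the Apache archive (URL is an assumption, adjust if you use a different mirror):

# the two SHA-512 values printed below must match
wget https://archive.apache.org/dist/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz.sha512
sha512sum hadoop-3.2.1.tar.gz
cat hadoop-3.2.1.tar.gz.sha512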
3. Install and configure
tar -xzvf hadoop-3.2.1.tar.gz
mv hadoop-3.2.1 hadoop
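Optionally export HADOOP_HOME and put its bin/sbin directories on the PATH so the hadoop commands resolve from anywhere; a minimal sketch, assuming the unpacked tree was moved to /data/hadoop (purely an assumption — the rest of this guide uses paths relative to the install directory):

# append to ~/.bashrc, then run: source ~/.bashrc
export HADOOP_HOME=/data/hadoop        # adjust to the actual install path
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin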

vim etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/share/dev/jdk8
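A quick sanity check that the JAVA_HOME setting is picked up (run from the hadoop directory):

# should print the Hadoop 3.2.1 version banner without JAVA_HOME errors
bin/hadoop version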

vim etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/data/hadoop/tmp</value>
        <description>A base for other temporary directories</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://172.28.8.8:9000</value>
        <description>URI of HDFS NameNode</description>
    </property>
</configuration>
		
vim etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/data/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/data/hadoop/tmp/dfs/data</value>
    </property>
</configuration>  
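The local paths referenced in the two files above live under /data/hadoop/tmp. The daemons normally create the dfs/name and dfs/data subdirectories themselves on format/startup, but creating the base directory up front avoids permission surprises; a quick sketch:

# base directory for hadoop.tmp.dir, dfs.namenode.name.dir and dfs.datanode.data.dir
mkdir -p /data/hadoop/tmp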

Set up passwordless SSH to the node:
ssh-keygen
ssh-copy-id 172.28.8.8
ssh 172.28.8.8
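The start/stop scripts log in to the node over ssh, so the key-based login must work without a password prompt; a quick non-interactive check:

# should print the hostname without asking for a password
ssh -o BatchMode=yes 172.28.8.8 hostname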

4. Run a MapReduce job locally
a. Format the filesystem
bin/hdfs namenode -format
b. Start the NameNode and DataNode daemons
sbin/start-dfs.sh
If this fails with "Attempting to operate on hdfs namenode as root", add the following user variables near the top of both scripts:
vim sbin/start-dfs.sh sbin/stop-dfs.sh
#!/usr/bin/env bash
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
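After saving both scripts, rerun sbin/start-dfs.sh. jps (shipped with the JDK) should list the HDFS daemons:

jps
# expected, with different pids: NameNode, DataNode, SecondaryNameNode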
c. Browse the NameNode web UI at http://172.28.8.8:9870
d. Create the HDFS directories required to run MapReduce jobs
bin/hdfs dfs -mkdir /user
bin/hdfs dfs -mkdir /user/<username>
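To smoke-test the setup you can run one of the bundled example jobs, as in the official guide; a sketch, assuming the standard examples jar shipped with 3.2.1 and run from the hadoop directory:

# copy the config files into HDFS and grep them with the example MapReduce job
bin/hdfs dfs -mkdir input
bin/hdfs dfs -put etc/hadoop/*.xml input
bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.1.jar grep input output 'dfs[a-z.]+'
bin/hdfs dfs -cat output/*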

5. YARN on a single node
a. Edit the configuration files
vim etc/hadoop/mapred-site.xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.application.classpath</name>
        <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
    </property>
</configuration>
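mapreduce.application.classpath above expands $HADOOP_MAPRED_HOME inside the YARN containers. If jobs later fail with "Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster", a common fix is to pin HADOOP_MAPRED_HOME explicitly in the same file; a sketch, with /path/to/hadoop standing in for the real install directory (an assumption):

<property>
    <name>yarn.app.mapreduce.am.env</name>
    <value>HADOOP_MAPRED_HOME=/path/to/hadoop</value>
</property>
<property>
    <name>mapreduce.map.env</name>
    <value>HADOOP_MAPRED_HOME=/path/to/hadoop</value>
</property>
<property>
    <name>mapreduce.reduce.env</name>
    <value>HADOOP_MAPRED_HOME=/path/to/hadoop</value>
</property>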

vim etc/hadoop/yarn-site.xml
<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <property>
        <name>yarn.nodemanager.env-whitelist</name>
        <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
    </property>
</configuration>

b. Modify the scripts so they can run as root
vim sbin/start-yarn.sh sbin/stop-yarn.sh
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
c. Start the ResourceManager and NodeManager daemons
sbin/start-yarn.sh
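jps should now additionally show a ResourceManager and a NodeManager, and with mapreduce.framework.name set to yarn the example job from step 4 runs on YARN instead of the local runner; a quick check (assumes the step 4 example was run, so its output directory must be removed first):

# verify the YARN daemons are up
jps
# rerun the example grep job; it should appear as an application in the ResourceManager UI
bin/hdfs dfs -rm -r output
bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.1.jar grep input output 'dfs[a-z.]+'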
d. Browse the ResourceManager web UI at http://localhost:8088/
When you are done, stop the YARN daemons:
sbin/stop-yarn.sh

## References
http://hadoop.apache.org/docs/current3/hadoop-project-dist/hadoop-common/SingleCluster.html#Execution