Hadoop single-node installation

  • Prerequisite: the JDK is already installed

  • Upload hadoop-2.9.2.tar.gz to the server

# Extract
tar -zxvf hadoop-2.9.2.tar.gz
# Move to the install directory
mv hadoop-2.9.2 /usr/local/software
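If /usr/local/software does not exist yet, create it with mkdir -p before running the mv above; listing the directory afterwards confirms the files landed where expected:
# Sanity check, assuming the paths above
ls /usr/local/software/hadoop-2.9.2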
  • Configure environment variables
vi /etc/profile

# Add the following
export HADOOP_HOME=/usr/local/software/hadoop-2.9.2
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH

# Apply the changes
source /etc/profile
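
A quick way to confirm the variables took effect (assuming the paths above):
# Should print the Hadoop 2.9.2 version banner
hadoop version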
  • Configure the XML files
# Edit core-site.xml
vi /usr/local/software/hadoop-2.9.2/etc/hadoop/core-site.xml
# Change it to the following
# Note the hostname: vi /etc/hostname -> hostnamectl set-hostname master
# Note the hosts mapping: vi /etc/hosts -> 192.168.128.129 master
<configuration>
   <property>
       <name>fs.defaultFS</name>
       <value>hdfs://master:9000</value>
   </property>
   <property>
       <name>hadoop.tmp.dir</name>
       <value>/usr/local/software/hadoop-2.9.2/tmp</value>
   </property>
</configuration>
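
# The hostname and hosts mapping mentioned above can be set like this
# (replace the IP with your own server's address):
hostnamectl set-hostname master
echo "192.168.128.129 master" >> /etc/hosts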

# Edit hdfs-site.xml
vi /usr/local/software/hadoop-2.9.2/etc/hadoop/hdfs-site.xml
# Change it to the following
# Note: the name and data directories must be created if they do not exist, or they will be created automatically when run by a user with sufficient permissions
<configuration>
   <property>
       <name>dfs.name.dir</name>
       <value>/usr/local/software/hadoop-2.9.2/dfs/name</value>
   </property>
   <property>
       <name>dfs.data.dir</name>
       <value>/usr/local/software/hadoop-2.9.2/dfs/data</value>
   </property>
   <property>
       <name>dfs.replication</name>
       <value>1</value>
   </property>
   <property>
       <name>dfs.permissions</name>
       <value>false</value>
   </property>
</configuration>
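
# The tmp, name and data directories above do not exist by default; creating them
# up front avoids startup failures (paths assume the configuration above):
mkdir -p /usr/local/software/hadoop-2.9.2/tmp
mkdir -p /usr/local/software/hadoop-2.9.2/dfs/name /usr/local/software/hadoop-2.9.2/dfs/data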

# Copy the template
cp /usr/local/software/hadoop-2.9.2/etc/hadoop/mapred-site.xml.template /usr/local/software/hadoop-2.9.2/etc/hadoop/mapred-site.xml
# Edit mapred-site.xml
vi /usr/local/software/hadoop-2.9.2/etc/hadoop/mapred-site.xml
# Change it to the following
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>

# Edit yarn-site.xml
vi /usr/local/software/hadoop-2.9.2/etc/hadoop/yarn-site.xml
# Change it to the following
<configuration>
   <property>
       <name>yarn.nodemanager.aux-services</name>
       <value>mapreduce_shuffle</value>
   </property>
</configuration>
  • Passwordless SSH login
# Generate a key pair
[root@master ~]# ssh-keygen -t rsa
# Run the following; type yes, then enter the password
[root@master ~]# ssh-copy-id master
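To verify, this should now log in and run without a password prompt:
[root@master ~]# ssh master hostname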
  • Format the NameNode
hdfs namenode -format
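If formatting succeeds, the console output should include a line similar to "Storage directory ... has been successfully formatted." Re-running the format later wipes the existing HDFS metadata, so only do this on a fresh setup.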
  • Startup error
# The error is as follows
localhost: Error: JAVA_HOME is not set and could not be found.

# Change to the /usr/local/software/hadoop-2.9.2/etc/hadoop directory
cd /usr/local/software/hadoop-2.9.2/etc/hadoop
# Edit
vi hadoop-env.sh

# Set the JAVA_HOME and HADOOP_CONF_DIR paths: comment out the original lines and add the following
export JAVA_HOME=/usr/local/software/jdk1.8.0_181
export HADOOP_CONF_DIR=/usr/local/software/hadoop-2.9.2/etc/hadoop

# Reload so the changes take effect
source hadoop-env.sh
  • Start
start-dfs.sh
start-yarn.sh

# Check the running processes
[root@master hadoop]# jps
16048 SecondaryNameNode
15891 DataNode
16195 ResourceManager
15768 NameNode
16537 Jps
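
With all five daemons up, the NameNode web UI (default port 50070 in Hadoop 2.x) and the YARN ResourceManager UI (default port 8088) should be reachable at http://master:50070 and http://master:8088. A minimal end-to-end smoke test, assuming the install path above, is to run the bundled wordcount example:
# Put a small file into HDFS and run the example job
hdfs dfs -mkdir -p /input
hdfs dfs -put /usr/local/software/hadoop-2.9.2/etc/hadoop/core-site.xml /input
hadoop jar /usr/local/software/hadoop-2.9.2/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.2.jar wordcount /input /output
# View the result
hdfs dfs -cat /output/part-r-00000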
  • Stop
stop-dfs.sh
stop-yarn.sh