hadoop安装

//实验准备
1. hostname
192.168.216.128 master
192.168.216.129 slave1
192.168.216.130 slave2

2. vi /etc/hosts
192.168.216.128 master
192.168.216.129 slave1
192.168.216.130 slave2

3. 关闭防火墙


//master

[root@localhost ~]# cd /usr/local/src
[root@localhost src]# wget https://archive.apache.org/dist/hadoop/common/hadoop-2.7.1/hadoop-2.7.1.tar.gz
[root@localhost src]# tar zxvf hadoop-2.7.1.tar.gz
[root@localhost src]# mv hadoop-2.7.1 /usr/local/hadoop
[root@localhost src]# cd /usr/local/hadoop/
[root@localhost hadoop]# mkdir tmp dfs dfs/data dfs/name

//接下来要将 “/usr/local/hadoop”这个目录分别拷贝到两个slave上

[root@localhost hadoop]# yum install -y rsync
[root@localhost hadoop]# yum install -y openssh-clients

//安装好rsync,将文件拷贝到另外两台slave中
[root@master ~]# rsync -av /usr/local/hadoop slave1:/usr/local/
....
....
.........................................[yes/no] yes
[root@master ~]# rsync -av /usr/local/hadoop slave2:/usr/local/
....
....
.........................................[yes/no] yes

//完成后去两台slave主机里检查是否存在拷贝好的文件,应该会在 /usr/local/hadoop 里
//注意:rsync 的源路径若带末尾斜杠(/usr/local/hadoop/),拷贝的是目录里的内容,
//文件会直接散落在目标的 /usr/local/ 下;要在 slave 上得到 /usr/local/hadoop 目录,
//源路径末尾不能加斜杠(并非 /usr/local/share)

 

posted @ 2016-08-28 21:09  Frankiee  阅读(393)  评论(0编辑  收藏  举报