Hadoop single-node configuration
JDK installation script
# unpack the JDK and move it under /usr/local
tar -xzvf jdk-8u171-linux-x64.tar.gz
mv jdk1.8.0_171/ /usr/local/
# append the Java environment variables to /etc/profile
sudo tee -a /etc/profile <<-'EOF'
JAVA_HOME=/usr/local/jdk1.8.0_171
JRE_HOME=$JAVA_HOME/jre
CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASSPATH PATH
EOF
# reload the profile so the variables take effect in the current shell
source /etc/profile
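As a quick sanity check that the shell now picks up the new JDK (paths as installed above), the version banner should mention 1.8.0_171:
java -version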
1. Configure environment variables
vim /etc/profile
export HADOOP_HOME=/root/hadoop-3.1.0
export PATH=.:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$JAVA_HOME/bin:$PATH
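After saving the file, reload it and confirm the hadoop launcher resolves (this assumes hadoop-3.1.0 really sits under /root as exported above):
source /etc/profile
hadoop version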
2. Passwordless SSH login
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
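With the key in place, logging in to the local machine should no longer ask for a password (the first connection may still prompt to accept the host key):
ssh localhost
exit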
3. hadoop-env.sh (in $HADOOP_HOME/etc/hadoop/)
export JAVA_HOME=/usr/local/jdk1.8.0_171
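One way to add the line without opening an editor (assuming the HADOOP_HOME exported in step 1):
echo 'export JAVA_HOME=/usr/local/jdk1.8.0_171' >> /root/hadoop-3.1.0/etc/hadoop/hadoop-env.sh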
4. core-site.xml (properties go inside <configuration>)
fs.default.name is only the deprecated alias of fs.defaultFS, so a single property is enough:
<property>
<name>fs.defaultFS</name>
<value>hdfs://127.0.0.1:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/root/hadoop-3.1.0/tmp</value>
</property>
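To double-check that Hadoop actually reads the value (no daemons need to be running for this), the setting can be queried directly, assuming the PATH from step 1:
hdfs getconf -confKey fs.defaultFS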
5. hdfs-site.xml
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
dfs.name.dir, dfs.data.dir and dfs.http.address are deprecated aliases; the current property names are used here:
<property>
<name>dfs.namenode.name.dir</name>
<value>/root/hadoop-3.1.0/hdfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/root/hadoop-3.1.0/hdfs/data</value>
</property>
<property>
<name>dfs.namenode.http-address</name>
<value>0.0.0.0:50070</value>
</property>
(Hadoop 3 serves the NameNode web UI on port 9870 by default; this setting keeps it on the old 50070.)
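Formatting the NameNode and starting the DataNode will create these directories on their own, but creating them up front, together with the tmp dir from core-site.xml, makes the layout explicit:
mkdir -p /root/hadoop-3.1.0/tmp /root/hadoop-3.1.0/hdfs/name /root/hadoop-3.1.0/hdfs/data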
6. mapred-site.xml
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
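Optional: if MapReduce jobs later fail with missing-class errors, the Hadoop 3 single-node guide additionally sets the job classpath in mapred-site.xml:
<property>
<name>mapreduce.application.classpath</name>
<value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
</property>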
7. yarn-site.xml
<property>
<name>yarn.resourcemanager.hostname</name>
<value>localhost</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
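Optional as well: the Hadoop 3 single-node guide whitelists a few environment variables so containers inherit them; the exact list below follows that guide:
<property>
<name>yarn.nodemanager.env-whitelist</name>
<value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
</property>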
8. sbin/start-dfs.sh and sbin/stop-dfs.sh (add the following at the top of both scripts; when running as root these user variables can also be exported in etc/hadoop/hadoop-env.sh instead)
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
9. sbin/start-yarn.sh and sbin/stop-yarn.sh (same placement, at the top of both scripts)
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
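With configuration done, a typical first run (assuming the paths and ports above) is to format HDFS once, start both daemon groups, and check the Java processes:
hdfs namenode -format
start-dfs.sh
start-yarn.sh
jps
jps should list NameNode, DataNode, SecondaryNameNode, ResourceManager and NodeManager; the NameNode web UI is then at http://<host>:50070 (as set in hdfs-site.xml) and the ResourceManager UI at http://<host>:8088.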