Hadoop Pseudo-Distributed and HBase Installation
Configure SSH
Key point: passwordless login to localhost is required
cd ~/.ssh
ssh-keygen -t rsa
cat ./id_rsa.pub >> ./authorized_keys
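To confirm the key works before moving on, a quick check (the Hadoop start scripts later will SSH to localhost, so this must not prompt for a password):
ssh localhost   # should log in without asking for a password
exit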
Hadoop Pseudo-Distributed Installation
# Java 1.8 (important: Java 8 is required; newer JDK versions are incompatible)
wget https://repo.huaweicloud.com/java/jdk/8u201-b09/jdk-8u201-linux-x64.tar.gz
sudo mkdir /usr/lib/jvm
sudo tar -zxf jdk-8u201-linux-x64.tar.gz -C /usr/lib/jvm
# Install Hadoop 3.3.6
wget https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz
sudo tar -zxf hadoop-3.3.6.tar.gz -C /usr/local/
sudo mv /usr/local/hadoop-3.3.6 /usr/local/hadoop
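Because the archive was unpacked with sudo, /usr/local/hadoop is owned by root; a sketch of handing it to your own account (username is a placeholder for your Linux user, the same as in the chown used for HBase below), otherwise formatting and starting HDFS as a normal user may fail with permission errors:
sudo chown -R username /usr/local/hadoop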
Configure /etc/profile
# JAVA_HOME
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_201
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
# HADOOP_HOME
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:${HADOOP_HOME}/bin
export PATH=$PATH:${HADOOP_HOME}/sbin
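Reload the profile and check that both tools resolve from the new paths (assuming the JDK unpacked into a directory named jdk1.8.0_201):
source /etc/profile
java -version      # should report 1.8.0_201
hadoop version     # should report Hadoop 3.3.6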
Configure hadoop/etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
Configure hadoop/etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
/usr/local/hadoop/bin/hdfs namenode -format
Configure hadoop/sbin/start-dfs.sh (replace username with your Linux user)
HDFS_DATANODE_USER=username
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=username
HDFS_SECONDARYNAMENODE_USER=username
Configure hadoop/sbin/start-yarn.sh
YARN_RESOURCEMANAGER_USER=username
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=username
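Only HDFS is started below, and the wordcount test works without YARN; if you also want YARN running, a sketch of starting and checking it:
/usr/local/hadoop/sbin/start-yarn.sh
jps    # ResourceManager and NodeManager should appear in addition to the HDFS daemons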
Configure hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_201
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:/usr/local/hadoop/bin
/usr/local/hadoop/sbin/start-dfs.sh
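Check that the daemons came up (port 9870 is the default NameNode web UI port in Hadoop 3):
jps    # expect NameNode, DataNode and SecondaryNameNode
# NameNode web UI: http://localhost:9870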
Test with WordCount
Create data.txt with the following content:
good better best
never let it rest
till good is better
and better best
good good study
day day up
today is a good day
hadoop fs -mkdir /input
hadoop fs -put data.txt /input
hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.6.jar wordcount /input /output
hadoop fs -cat /output/part-r-00000
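With the data.txt above, the result should look roughly like this (word, tab, count, sorted alphabetically):
a	1
and	1
best	2
better	3
day	3
good	5
is	2
it	1
let	1
never	1
rest	1
study	1
till	1
today	1
up	1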
Install HBase
wget https://mirrors.tuna.tsinghua.edu.cn/apache/hbase/2.5.8/hbase-2.5.8-hadoop3-bin.tar.gz
sudo tar -zxvf hbase-2.5.8-hadoop3-bin.tar.gz -C /usr/local/
sudo mv /usr/local/hbase-2.5.8-hadoop3/ /usr/local/hbase
sudo chown -R username /usr/local/hbase/
Configure ~/.bashrc
# HBase
export HBASE_HOME=/usr/local/hbase
export PATH=$PATH:${HBASE_HOME}/bin
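Reload the shell configuration and confirm the hbase command is found:
source ~/.bashrc
hbase version    # should report HBase 2.5.8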
Configure /usr/local/hbase/conf/hbase-env.sh
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_201/
export HBASE_CLASSPATH=/usr/local/hadoop/etc/hadoop/
export HBASE_MANAGES_ZK=true # let HBase manage its own ZooKeeper
Configure /usr/local/hbase/conf/hbase-site.xml
<configuration>
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://localhost:9000/hbase</value>
    </property>
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
    <property>
        <name>hbase.unsafe.stream.capability.enforce</name>
        <value>false</value>
    </property>
</configuration>
Possible extra steps
# Set aside the redundant SLF4J binding jar
mv /usr/local/hbase/lib/client-facing-thirdparty/log4j-slf4j-impl-2.17.2.jar /usr/local/hbase/lib/client-facing-thirdparty/log4j-slf4j-impl-2.17.2.jar.bak
If you see ERROR: org.apache.hadoop.hbase.ipc.ServerNotRunningYetException: Server is not running yet
Edit /usr/local/hbase/conf/hbase-env.sh and add
export HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP=true
Start HBase
/usr/local/hbase/bin/start-hbase.sh
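After the start script finishes, jps should show the HBase daemons alongside the HDFS ones (HQuorumPeer appears because HBASE_MANAGES_ZK=true):
jps    # expect HMaster, HRegionServer and HQuorumPeer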
hbase shell
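A minimal smoke test inside the HBase shell; the table name test and column family cf here are arbitrary examples:
create 'test', 'cf'
put 'test', 'row1', 'cf:a', 'value1'
scan 'test'
disable 'test'
drop 'test'
exit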