| |
| |
| chmod 755 hadoop-3.2.1.tar.gz |
| |
| tar -zxvf hadoop-3.2.1.tar.gz |
| |
| vim /etc/profile |
| |
| export HADOOP_HOME=/home/software/hadoop-3.2.1 |
| export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin |
| |
| export HADOOP_CONF_DIR=/home/software/hadoop-3.2.1/etc/hadoop |
| export PATH=$PATH:$HADOOP_CONF_DIR/bin   # NOTE(review): etc/hadoop has no bin/ subdirectory — this PATH entry is harmless but unnecessary |
| export YARN_CONF_DIR=/home/software/hadoop-3.2.1/etc/hadoop |
| |
| |
| source /etc/profile |
| |
| cd /home/software/hadoop-3.2.1/etc/hadoop |
| |
| rm -rf core-site.xml hdfs-site.xml mapred-site.xml yarn-site.xml |
Edit core-site.xml and add the following content:
| <?xml version="1.0" encoding="UTF-8"?> |
| <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <configuration> |
| <property> |
| <name>fs.defaultFS</name> |
| <value>hdfs://hadoop01:9000</value> |
| </property> |
| <property> |
| <name>hadoop.tmp.dir</name> |
| <value>/home/software/hadoop-3.2.1/tmp</value> |
| </property> |
| <property> |
| <name>hadoop.proxyuser.root.hosts</name> |
| <value>*</value> |
| </property> |
| <property> |
| <name>hadoop.proxyuser.root.groups</name> |
| <value>*</value> |
| </property> |
| </configuration> |
Edit hdfs-site.xml and add the following content:
| <?xml version="1.0" encoding="UTF-8"?> |
| <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <configuration> |
| <property> |
| <name>dfs.replication</name> |
| <value>1</value> |
| </property> |
| <property> |
| <!-- Hadoop 3.x property name; "dfs.secondary.http.address" is long-deprecated. The 3.x default SecondaryNameNode web port is 9868. -->
| <name>dfs.namenode.secondary.http-address</name> |
| <value>hadoop01:50070</value> |
| </property> |
| <property> |
| <name>dfs.webhdfs.enabled</name> |
| <value>true</value> |
| </property> |
| <property> |
| <name>dfs.support.append</name> |
| <value>true</value> |
| </property> |
| <!-- NOTE(review): "dfs.webhdfs.broken.enabled" is not a documented HDFS property (likely a typo); Hadoop silently ignores unknown keys. -->
| <property> |
| <name>dfs.webhdfs.broken.enabled</name> |
| <value>true</value> |
| </property> |
| <property> |
| <name>dfs.permissions.enabled</name> |
| <value>false</value> |
| </property> |
| </configuration> |
Edit mapred-site.xml and add the following content:
| <?xml version="1.0"?> |
| <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <configuration> |
| <property> |
| <name>mapreduce.framework.name</name> |
| <value>yarn</value> |
| </property> |
| <property> |
| <!-- NOTE(review): yarn.nodemanager.aux-services is a YARN setting read from yarn-site.xml (where it is also configured); the NodeManager does not read mapred-site.xml, so this copy has no effect. -->
| <name>yarn.nodemanager.aux-services</name> |
| <value>mapreduce_shuffle</value> |
| </property> |
| </configuration> |
Edit yarn-site.xml and add the following content:
| <?xml version="1.0"?> |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| <configuration> |
| <property> |
| <name>yarn.resourcemanager.hostname</name> |
| <value>hadoop01</value> |
| </property> |
| <property> |
| <name>yarn.nodemanager.aux-services</name> |
| <value>mapreduce_shuffle</value> |
| </property> |
| |
| <property> |
| <name>yarn.log-aggregation-enable</name> |
| <value>true</value> |
| </property> |
| <property> |
| <name>yarn.log-aggregation.retain-seconds</name> |
| <!-- 7 days; the original value "640800" appears to be a typo of 604800 (60*60*24*7) -->
| <value>604800</value> |
| </property> |
| <property> |
| <name>yarn.application.classpath</name> |
| <value>/home/software/hadoop-3.2.1/etc/hadoop:/home/software/hadoop-3.2.1/share/hadoop/common/lib/*:/home/software/hadoop-3.2.1/share/hadoop/common/*:/home/software/hadoop-3.2.1/share/hadoop/hdfs:/home/software/hadoop-3.2.1/share/hadoop/hdfs/lib/*:/home/software/hadoop-3.2.1/share/hadoop/hdfs/*:/home/software/hadoop-3.2.1/share/hadoop/mapreduce/lib/*:/home/software/hadoop-3.2.1/share/hadoop/mapreduce/*:/home/software/hadoop-3.2.1/share/hadoop/yarn:/home/software/hadoop-3.2.1/share/hadoop/yarn/lib/*:/home/software/hadoop-3.2.1/share/hadoop/yarn/*</value> |
| </property> |
| </configuration> |
| [root@master sbin]# vim /etc/hosts   (command reconstructed — the scrape dropped it; add the mapping below) |
| 192.168.128.78 hadoop01 |
| |
| cd /home/software/hadoop-3.2.1 |
| mkdir -p data |
| [root@localhost software]# start-dfs.sh |
| Starting namenodes on [hadoop01] |
| ERROR: Attempting to operate on hdfs namenode as root |
| ERROR: but there is no HDFS_NAMENODE_USER defined. Aborting operation. |
| Starting datanodes |
| ERROR: Attempting to operate on hdfs datanode as root |
| ERROR: but there is no HDFS_DATANODE_USER defined. Aborting operation. |
| Starting secondary namenodes [hadoop01] |
| ERROR: Attempting to operate on hdfs secondarynamenode as root |
| ERROR: but there is no HDFS_SECONDARYNAMENODE_USER defined. Aborting operation. |
| vim /etc/profile |
| |
| export HDFS_NAMENODE_USER=root |
| export HDFS_DATANODE_USER=root |
| export HDFS_SECONDARYNAMENODE_USER=root |
| export YARN_RESOURCEMANAGER_USER=root |
| export YARN_NODEMANAGER_USER=root |
| |
| source /etc/profile |
| [root@localhost software]# start-dfs.sh |
| Starting namenodes on [hadoop01] |
| 上一次登录:一 12月 4 03:07:13 CST 2023pts/0 上 |
| hadoop01: Permission denied (publickey,gssapi-keyex,gssapi-with-mic,password). |
| Starting datanodes |
| 上一次登录:一 12月 4 03:08:55 CST 2023pts/0 上 |
| localhost: Permission denied (publickey,gssapi-keyex,gssapi-with-mic,password). |
| Starting secondary namenodes [hadoop01] |
| 上一次登录:一 12月 4 03:08:55 CST 2023pts/0 上 |
| hadoop01: Permission denied (publickey,gssapi-keyex,gssapi-with-mic,password). |
| |
| ssh-keygen -t rsa |
| |
| [root@hadoop01 ~]# cd ~/.ssh |
| [root@hadoop01 .ssh]# ls |
| id_rsa id_rsa.pub known_hosts |
| |
| cat id_rsa.pub >> authorized_keys |
| |
| chmod 600 authorized_keys |
| [root@hadoop01 .ssh]# start-dfs.sh |
| Starting namenodes on [hadoop01] |
| 上一次登录:一 12月 4 03:19:04 CST 2023从 192.168.128.1pts/0 上 |
| hadoop01: ERROR: JAVA_HOME is not set and could not be found. |
| Starting datanodes |
| 上一次登录:一 12月 4 03:21:22 CST 2023pts/0 上 |
| localhost: ERROR: JAVA_HOME is not set and could not be found. |
| Starting secondary namenodes [hadoop01] |
| 上一次登录:一 12月 4 03:21:22 CST 2023pts/0 上 |
| hadoop01: ERROR: JAVA_HOME is not set and could not be found. |
| |
| cd /home/software/hadoop-3.2.1/etc/hadoop |
| |
| vim hadoop-env.sh |
| |
| |
| export JAVA_HOME=/home/software/jdk1.8.0_181 |
| export HADOOP_CONF_DIR=/home/software/hadoop-3.2.1/etc/hadoop |
| |
| |
| source hadoop-env.sh |
| |
| [root@hadoop01 hadoop]# start-dfs.sh |
| Starting namenodes on [hadoop01] |
| 上一次登录:一 12月 4 03:21:23 CST 2023pts/0 上 |
| Starting datanodes |
| 上一次登录:一 12月 4 04:02:15 CST 2023pts/0 上 |
| Starting secondary namenodes [hadoop01] |
| 上一次登录:一 12月 4 04:02:17 CST 2023pts/0 上 |
| |
| |
| [root@hadoop01 hadoop]# jps |
| 2083 NameNode |
| 2451 SecondaryNameNode |
| 2567 Jps |
| 2218 DataNode |
| |
| |
| [root@master software]# netstat -ntlp | grep java |
| Active Internet connections (only servers) |
| Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name |
| tcp 0 0 192.168.128.78:9000 0.0.0.0:* LISTEN 3090/java |
| tcp 0 0 0.0.0.0:9870 0.0.0.0:* LISTEN 3090/java |
| tcp 0 0 192.168.128.78:50070 0.0.0.0:* LISTEN 3450/java |
| |
| |
| [root@hadoop01 hadoop]# stop-dfs.sh |
| Stopping namenodes on [hadoop01] |
| 上一次登录:一 12月 4 04:02:24 CST 2023pts/0 上 |
| Stopping datanodes |
| 上一次登录:一 12月 4 04:08:15 CST 2023pts/0 上 |
| Stopping secondary namenodes [hadoop01] |
| 上一次登录:一 12月 4 04:08:17 CST 2023pts/0 上 |
| |
| |
| [root@hadoop01 hadoop]# firewall-cmd --permanent --add-port=9870/tcp   (commands reconstructed — "success" is firewall-cmd output; verify against the original post) |
| success |
| [root@hadoop01 hadoop]# firewall-cmd --reload |
| success |
| |
| |
| telnet 192.168.128.78 9870 |
| |
| http://192.168.128.78:9870/ |

| |
| hdfs namenode -format   # run once, BEFORE the first start-dfs.sh; re-running it wipes HDFS metadata |
| |
| cd /home/software/hadoop-3.2.1/logs |
| |
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 震惊!C++程序真的从main开始吗?99%的程序员都答错了
· 【硬核科普】Trae如何「偷看」你的代码?零基础破解AI编程运行原理
· 单元测试从入门到精通
· 上周热点回顾(3.3-3.9)
· winform 绘制太阳,地球,月球 运作规律
2021-12-01 从服务器下载文件到本地
2021-12-01 springboot导入(导出)excel