常用脚本
1. 集群分发脚本（xsync）
#!/bin/bash
# xsync — distribute files/directories to every node in the cluster.
# Usage: xsync <path> [<path> ...]
# For each path that exists locally, creates the same parent directory on
# each remote host (via ssh) and syncs the entry there with rsync -av.

#1. 判断参数个数 — require at least one path argument
if [ $# -lt 1 ]; then
    echo "Not Enough Argument!"
    exit 1
fi

#2. 遍历集群所有机器 — iterate over every cluster host
for host in hadoop102 hadoop103 hadoop104; do
    echo "==================== $host ===================="
    #3. 遍历所有目录,挨个发送 — send each requested path
    for file in "$@"; do
        #4. 判断文件是否存在 — only sync paths that exist locally
        if [ -e "$file" ]; then
            #5. 获取父目录 — resolve the physical parent dir (-P follows symlinks,
            # so the same absolute path is recreated on the remote side)
            pdir=$(cd -P "$(dirname "$file")" && pwd)
            #6. 获取当前文件的名称 — base name of the entry being sent
            fname=$(basename "$file")
            ssh "$host" "mkdir -p $pdir"
            rsync -av "$pdir/$fname" "$host:$pdir"
        else
            echo "$file does not exist!"
        fi
    done
done
#!/bin/bash
# myhadoop — start/stop the whole Hadoop cluster in dependency order:
#   start: HDFS (hadoop102) -> YARN (hadoop103) -> historyserver (hadoop102)
#   stop:  historyserver -> YARN -> HDFS (reverse order)
# Usage: myhadoop start|stop

if [ $# -lt 1 ]; then
    echo "No args input"
    # bug fix: was missing — the script previously fell through into the
    # case statement with an empty $1 and printed "no such method!"
    exit 1
fi

case $1 in
"start")
    echo "================启动hadoop集群============"
    echo "---------------启动hdfs---------------"
    ssh hadoop102 "/opt/module/hadoop-3.1.3/sbin/start-dfs.sh"
    echo "---------------启动yarn---------------"
    ssh hadoop103 "/opt/module/hadoop-3.1.3/sbin/start-yarn.sh"
    echo "---------------启动historyserver---------------"
    # bug fix: path was "opt/module/..." (no leading '/'), so mapred was
    # resolved relative to the remote home directory and never found
    ssh hadoop102 "/opt/module/hadoop-3.1.3/bin/mapred --daemon start historyserver"
    ;;
"stop")
    echo "================关闭hadoop集群============"
    echo "---------------关闭historyserver---------------"
    ssh hadoop102 "/opt/module/hadoop-3.1.3/bin/mapred --daemon stop historyserver"
    echo "---------------关闭yarn---------------"
    ssh hadoop103 "/opt/module/hadoop-3.1.3/sbin/stop-yarn.sh"
    echo "---------------关闭hdfs---------------"
    ssh hadoop102 "/opt/module/hadoop-3.1.3/sbin/stop-dfs.sh"
    ;;
*)
    echo "no such method!"
    ;;
esac
有疑问可以加wx:18179641802,进行探讨