Shell scripts

Today I spent a whole day working from my senior labmate's and Kangkang's code, and finally put together a runnable set of project scripts. At last I can drive the whole project with scripts, haha.

The advantage of a script-driven workflow really shows on complex projects.

The code follows.

input.sh

#!/bin/bash
set -eu
current_home=$(readlink -f "$(dirname "$0")")/..  # resolve the project home directory
source $current_home/conf/app.conf                # pull in config and helper scripts
source $current_home/scripts/app.rc
function usage() {
    echo "Usage: $0" >&2   # usage message goes to stderr
    exit $1
}
cur_date=$(date +%Y%m%d)
while getopts "h" opt; do
    case $opt in
        h) usage 0;;
        \?) echo "Invalid option: -$OPTARG" >&2; usage 1;;
    esac
done
shift $((OPTIND-1))
#pre_date=$(date -d "$cur_date -1 day" +%Y%m%d)
input_dir=$current_home/scripts/input
input=$input_dir/input.sh
echo "$(getdate) > BEGIN Build Input" 2>&1   # stderr merged into stdout
echo "$(getdate) > START Build Input" 2>&1
sh $input $cur_date                          # run the sub-script under input/
echo "$(getdate) > FINISH Build Input" 2>&1
echo "$(getdate) > END Build Input" 2>&1
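For what it's worth, invoking the driver looks like this (a hypothetical session from inside the scripts/ directory):

sh input.sh      # builds today's input via input/input.sh
sh input.sh -h   # prints "Usage: input.sh" to stderr and exits 0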

input/input.sh

#!/bin/bash
#set -eu
current_home=$(readlink -f "$(dirname "$0")")/../..
source $current_home/conf/app.conf
source $current_home/scripts/app.rc
function usage() {
    echo "Usage: $0" >&2
    exit $1
}
class=/ClassPath/
job_name=$(basename ${0%.sh})
job_conf=$(get_job_conf $0)
libjars=$exec_jar
force=true
cur_date=$1
#pre_date=$(date -d "$cur_date -1 day" +%Y%m%d)
inputdata=/InPutDateDir/
outputdata=$app_prefix/out
busy_file=$outputdata.busy
done_file=$outputdata.done
$force && hrm $done_file && hrm $busy_file       # a forced run clears old markers first
hexist $busy_file || hexist $done_file && exit   # bail out if already running or done
htouch $busy_file
hrmr $outputdata
hjar $exec_jar $class $inputdata $outputdata     # runs "hadoop jar"
htouch $done_file
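Two details worth noting here. First, `&&` and `||` have equal precedence in the shell and group left to right, so the guard line reads as "(busy marker exists OR done marker exists) AND exit"; and since force=true wipes both markers on the line before, this version always reruns. Second, the job takes the date as its first positional argument, so it can be run by hand (hypothetical example):

sh scripts/input/input.sh 20140703   # $1 becomes cur_date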

app.conf: variable definitions

export JAVA_HOME="/home/yunti/java-current"
# hadoop conf
hadoop_home=/home/yunti/hadoop-current
hadoop_exec=$hadoop_home/bin/hadoop
hadoop_exec_conf=$HOME/hadoop/conf
# libjar conf
exec_jar=$current_home/jar/snapshot.jar
# io conf
app_prefix=/HDFSHome/
# local path conf
log_dir=$current_home/log
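The scripts also source scripts/app.rc, which the post doesn't show. Judging from the calls above, it must define at least getdate and get_job_conf and make the h* wrappers below available. Here is a purely speculative sketch; only the function names come from the real scripts, every body is a guess:

# app.rc: hypothetical reconstruction
function getdate() {
    date '+%Y-%m-%d %H:%M:%S'                  # timestamp for log lines (format is a guess)
}
function get_job_conf() {
    echo "${1%.sh}.conf"                       # guess: a .conf file next to the job script
}
source $(dirname ${BASH_SOURCE[0]})/hadoop.rc  # guess: this is how hadoop.rc gets loaded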

hadoop.rc: Hadoop command wrappers

# Hadoop resource file: wrappers for hadoop commands; "$@" forwards all of the caller's arguments
function hadoop() {
    if [ -n "$hadoop_exec_conf" ]; then
        $hadoop_exec --config $hadoop_exec_conf "$@"
    else
        $hadoop_exec "$@"
    fi
}
function hput() {
    hadoop fs -put "$@"
}
function hget() {
    hadoop fs -get "$@"
}
function hcat() {
    hadoop fs -cat "$@"
}
function hls() {
    hadoop fs -ls "$@"
}
function htext() {
    hadoop fs -text "$@"
}
function hgetmerge() {
    hadoop fs -getmerge "$@"
}
function htouch() {
    hadoop fs -touchz "$@" >/dev/null 2>&1
    return 0
}
function hexist() {
    hadoop fs -test -e "$@"
}
function hrmr() {
    if hexist "$@"; then
        hadoop fs -rmr "$@" >/dev/null 2>&1
    fi
}
function hrm() {
    if hexist "$@"; then
        hadoop fs -rm "$@" >/dev/null 2>&1
    fi
}
function hmv() {
    hadoop fs -mv "$@"
}
function hmkdir() {
    hadoop fs -mkdir "$@"
}

function hcp() {
    hadoop fs -cp "$@" >/dev/null 2>&1
}
function hsetrep() {
    hadoop fs -setrep "$@" >/dev/null 2>&1
}
function hdfs_part_num() {
    if hexist "$@"; then
        hadoop fs -ls "$@" | grep 'part-' | wc -l
    fi
}
function hdfs_size() {
    if hexist "$@"; then
        hadoop fs -dus "$@" | grep "$@" | awk '{print $2;}'
    fi
}
function hdfs_time() {
    if hexist "$@"; then
        hadoop fs -ls "$@" | grep "$@" | awk '{print $6","$7}'
    fi
}
function hdfs_check() {
    path=$1
    num_parts=$2
    min_size=$3
    parts=$(hadoop fs -ls $path | grep 'part-' | wc -l)
    size=$(hadoop fs -dus $path | awk '{print $2}')
    if [[ $parts == $num_parts && $size -ge $min_size || \
          $min_size == 0 ]]; then
        return 0
    else
        return 1
    fi
}
function get_counter() {
    grep 'INFO mapred.JobClient: ' $1 | \
        sed 's/.*INFO mapred.JobClient: //' > $2
}
function hjar() {
    local tag_file=$log_dir/${job_name/: /.}.$cur_date.tag
    local log_file=$log_dir/${job_name/: /.}.$cur_date.log
    local counter_file=$log_dir/${job_name/: /.}.$cur_date.cnt
    touch $tag_file
    hadoop jar "$@" 2>&1 | tee $log_file
    local status=${PIPESTATUS[0]}   # exit status of "hadoop jar", not of tee
    get_counter $log_file $counter_file
    return $status
}
function hdistcp() {
    hadoop distcp "$@"
}
function hstat() {
    hadoop fs -stat "$@" >/dev/null 2>&1
}
function dist_pull() {
    if [ $# -lt 2 ]; then
        echo "$(getdate) > invalid number of arguments"
        return 1
    fi
    local src=$1
    local dest=$2
    local log_dir=${dest}_distcp_logs
    if [ $# -gt 2 ]; then
        log_dir=$3
    fi
    hexist $log_dir && hrmr $log_dir
    hdistcp -Dmapred.map.child.java.opts="-Djava.system.class.loader=org.apache.hadoop.mapred.DeAppClassLoader -Xmx200m" \
        -libjars $hadoop_jar -i -overwrite -delete -m $distcp_map_num -log $log_dir $src $dest
    local ret=$?
    if [ $ret -eq 0 ]; then
        hadoop fs -chmod -R +r $dest
    fi
    return $ret
}
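These wrappers compose nicely in job scripts. For instance, a post-run sanity check on the output directory might look like this (the path and thresholds are made up for illustration):

# hypothetical check: expect 100 part files totalling at least 1 GB
if hdfs_check /HDFSHome/out 100 1073741824; then
    echo "$(getdate) > output looks sane"
else
    echo "$(getdate) > output check failed" >&2
    exit 1
fi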

build.sh: compile and package the project

#!/bin/bash
# build.sh

mvn clean package
rm -f jar/*.jar
cp target/*.jar jar/snapshot.jar
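With all of the above in place, a full rebuild-and-run cycle is just two commands (assuming the directory layout used throughout this post):

sh build.sh          # mvn clean package, then refresh jar/snapshot.jar
sh scripts/input.sh  # kick off the Hadoop job for today's date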

 
