Spark 常用命令与脚本（spark-submit 批处理提交示例）
#!/bin/bash
# Determine the processing date: use $1 when supplied, otherwise default
# to yesterday (YYYY-MM-DD). Also records today's date in cur_date.
yesterday=$1
# `[ ! $yesterday ]` was an unquoted one-arg test — fragile with spaces and
# under `set -u`; -z with quotes is the correct emptiness check.
if [ -z "$yesterday" ]; then
  yesterday=$(date -d "-1 day" +%F)
  cur_date=$(date +%F)
fi
echo "yesterday=$yesterday"
#!/bin/bash
# Resume-able batch driver: read the last checkpoint index from a file,
# run 15 spark-submit jobs with consecutive indices, and persist the
# next index after each success so a failed run can be restarted where
# it stopped.
# Reads:  /home/hadoop/*****.txt  (current start index)
# Writes: /home/hadoop/***.txt    (failed index on error, next index on success)
start=$(cat /home/hadoop/*****.txt)   # arithmetic expansion replaces `expr`
end=$((start + 15))
echo "$start,$end"
for ((start = start; start < end; start++)); do
  spark-submit \
    --master yarn \
    --deploy-mode cluster \
    --num-executors 10 \
    --driver-memory 3g \
    --executor-memory 10g \
    --executor-cores 2 \
    --queue batch \
    --conf spark.memory.fraction=0.9 \
    --conf spark.memory.storageFraction=0.2 \
    --conf spark.sql.shuffle.partitions=40 \
    --conf spark.default.parallelism=40 \
    --conf fs.oss.multipart.download.ahead.part.max.number=10 \
    --conf fs.oss.multipart.download.threads=40 \
    --conf spark.files.openCostInBytes=12000000 \
    --conf spark.sql.files.openCostInBytes=12000000 \
    --name 'ods_yuqing_civiw_i_w_his' \
    --archives hdfs:///home/****/env/mypython3.zip \
    --conf spark.pyspark.driver.python=./mypython3.zip/mypython3/bin/python3 \
    --conf spark.pyspark.python=./mypython3.zip/mypython3/bin/python3 \
    ossref://bigdata-shell/bigdata_code/***/***.py '2015' "$start"
  if [ $? -ne 0 ]; then
    echo "错误了${start}"
    # Persist the failing index so the next run retries it.
    echo "$start" > /home/hadoop/***.txt
    # A plain break/continue would let the run continue past a failure;
    # exit 1 is required to abort the whole batch on error.
    exit 1
  else
    # Checkpoint the next index so a restart skips completed jobs.
    success=$((start + 1))
    echo "$success" > /home/hadoop/***.txt
  fi
done