spark config详解
1.spark-env.sh
测试环境配置参考
# spark-env.sh for a single-node test deployment.
# Pull in Hadoop's classpath so a "Hadoop-free" Spark build can talk to HDFS/YARN.
export SPARK_DIST_CLASSPATH="$(/usr/local/hadoop/bin/hadoop classpath)"
export SCALA_HOME=/usr/local/scala
export JAVA_HOME=/opt/jdk1.8.0_65
# NOTE(review): Spark's standalone scripts read SPARK_MASTER_HOST (2.x+) or
# SPARK_MASTER_IP (1.x), not SPARK_MASTER — confirm what consumes this value.
export SPARK_MASTER=localhost
export SPARK_LOCAL_IP=localhost
export HADOOP_HOME=/usr/local/hadoop
export SPARK_HOME=/usr/local/spark
# Fixed typo: was SPARK_LIBARY_PATH, which nothing reads; Spark expects
# SPARK_LIBRARY_PATH for native library locations.
export SPARK_LIBRARY_PATH=".:${JAVA_HOME}/lib:${JAVA_HOME}/jre/lib:${HADOOP_HOME}/lib/native"
# Point YARN client code at the Hadoop cluster configuration.
export YARN_CONF_DIR="${HADOOP_HOME}/etc/hadoop"
2.slaves
# A Spark Worker will be started on each of the machines listed below.
# One worker hostname per line. NOTE(review): these names must be resolvable
# from the master node — presumably the start scripts reach them via SSH; verify.
mercury
venus
mars
saturn