Spark Environment Configuration

spark-env.sh

#JAVA_HOME
export JAVA_HOME=/export/servers/jdk1.8.0_271

# Spark Master address (left commented out: with the ZooKeeper HA setup below, the active master is elected dynamically)
#export SPARK_MASTER_HOST=node01
export SPARK_MASTER_PORT=7077

# Spark History Server
export SPARK_HISTORY_OPTS="-Dspark.history.ui.port=4000 -Dspark.history.retainedApplications=3 -Dspark.history.fs.logDirectory=hdfs://node01:8020/spark_log"

# Spark runtime: high availability via ZooKeeper
export SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=node01:2181,node02:2181,node03:2181 -Dspark.deploy.zookeeper.dir=/spark" # cluster state is managed through ZooKeeper
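With HA enabled, ZooKeeper keeps the election and recovery state under the /spark znode configured above. A quick sanity check after startup (a sketch, assuming the ZooKeeper zkCli.sh client is on your PATH; the active master's web UI at http://node01:8080 should report Status: ALIVE, a standby's STANDBY):

# List the znodes Spark uses for leader election and state recovery
zkCli.sh -server node01:2181 ls /spark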

slaves

# Worker (slave) nodes, one hostname per line
node01
node02
node03
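Both spark-env.sh and slaves must be identical on every node. A minimal sketch for pushing them from node01 to the others (assumes password-less SSH and a hypothetical install path of /export/servers/spark, mirroring the JDK layout above):

# Distribute the configuration files to the other nodes
for host in node02 node03; do
  scp /export/servers/spark/conf/{spark-env.sh,slaves} $host:/export/servers/spark/conf/
done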

spark-defaults.conf

# Enable event logging and persist the logs to HDFS
spark.eventLog.enabled   true
spark.eventLog.dir       hdfs://node01:8020/spark_log
spark.eventLog.compress  true
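The event-log directory is not created automatically, and the history server will fail to start if it is missing. Create it in HDFS first (assuming the Hadoop client on this node is configured for the node01:8020 namenode):

# Create the log directory referenced by spark.eventLog.dir
hdfs dfs -mkdir -p /spark_log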

Starting the Spark Nodes

# On the primary node, start the Master and all Workers, plus the history server
sbin/start-all.sh
sbin/start-history-server.sh
# On one of the other nodes, start a second Master (it remains in STANDBY until ZooKeeper elects it)
sbin/start-master.sh
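To confirm the cluster works end to end, run jps on each node (expect Master and/or Worker, plus HistoryServer on node01) and submit the bundled SparkPi example. A sketch, run from the Spark home directory; the examples jar name depends on your Spark and Scala versions, hence the wildcard:

# Submit a test job; listing both masters lets the driver fail over to whichever is active
bin/spark-submit \
  --class org.apache.spark.examples.SparkPi \
  --master spark://node01:7077,node02:7077 \
  examples/jars/spark-examples_*.jar 100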