1) slaves

# A Spark Worker will be started on each of the machines listed below.
dataNode
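
If the cluster has more than one worker machine, list each hostname on its own line; the names below are hypothetical:

dataNode1
dataNode2
dataNode3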

2) spark-env.sh

export HADOOP_CONF_DIR=/home/hadoop/hadoop/etc/hadoop
export YARN_CONF_DIR=/home/hadoop/hadoop/etc/hadoop
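
Worker resources can optionally be capped in the same file; the values here are illustrative, not from the original setup:

export SPARK_WORKER_CORES=2     # CPU cores each Worker may use (assumed value)
export SPARK_WORKER_MEMORY=2g   # memory each Worker may allocate (assumed value)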

3) Start the cluster (the scripts are in $SPARK/sbin)

# start-all.sh
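
Hadoop ships its own start-all.sh, so calling the Spark script by its full path avoids picking up the wrong one. The daemons can also be started one at a time:

# $SPARK/sbin/start-master.sh
# $SPARK/sbin/start-slaves.sh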

4) Verify with jps

# jps
25911 Master
26104 Worker
6218 Jps
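
Master and Worker both appear here because this machine is evidently also listed in slaves. If either process is missing, the daemon logs are written under $SPARK/logs by default:

# ls $SPARK/logs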

5) Check the cluster status

http://localhost:8080/
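
8080 is the default port of the Master web UI; each Worker serves its own UI on 8081. From another machine, replace localhost with the Master's hostname. A quick sanity check from the shell:

# curl -s http://localhost:8080/ | head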


posted on 2015-12-01 13:58 by hanhui