- Create a non-sbt Scala project
- Add the Spark jar to the project
File -> Project Structure -> Libraries, then add spark-assembly-1.5.2-hadoop2.6.0.jar as a library
import scala.math.random
import org.apache.spark._

/**
 * Created by code-pc on 16/3/2.
 */
object test1 {
  def main(args: Array[String]) {
    // Run locally; the number of partitions (slices) can be passed as the first argument
    val conf = new SparkConf().setAppName("Spark Pi").setMaster("local")
    val spark = new SparkContext(conf)
    val slices = if (args.length > 0) args(0).toInt else 2
    val n = 100000 * slices
    // Monte Carlo estimation: count random points that fall inside the unit circle
    val count = spark.parallelize(1 to n, slices).map { i =>
      val x = random * 2 - 1
      val y = random * 2 - 1
      if (x * x + y * y < 1) 1 else 0
    }.reduce(_ + _)
    println("Pi is roughly " + 4.0 * count / n)
    spark.stop()
  }
}
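The estimate works because the random points (x, y) are uniform over the square [-1, 1] × [-1, 1], whose area is 4, while the unit circle inside it has area π; the fraction of points landing inside the circle therefore converges to π/4, so 4.0 * count / n approximates π.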
File -> Project Structure -> Artifacts -> + -> Jars -> From modules with dependencies
Menu bar: Build -> Build Artifacts
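To run the packaged jar outside the IDE, one option is spark-submit from the Spark distribution. The jar path below is only an illustration (IDEA puts artifacts under out/artifacts/ by default), and note that the hard-coded setMaster("local") in the example code takes precedence over the --master flag:

```
spark-submit --class test1 --master local /path/to/out/artifacts/test1_jar/test1.jar 10
```

The trailing 10 is passed to main as args(0) and sets the number of slices.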