import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;
import java.util.List;
/**
 * count operator:
 * returns the number of elements in the RDD.
 */
public class CountOperator {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("count");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Build an RDD from a local list of four names.
        List<String> names = Arrays.asList("w1", "w2", "w3", "w4");
        JavaRDD<String> nameRdd = sc.parallelize(names);

        // count() is an action: it triggers a job and returns the total
        // number of elements in the RDD (4 here).
        long dataNum = nameRdd.count();
        System.err.println(dataNum);

        sc.stop();
    }
}
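
For comparison, the sketch below (an illustration added here, not part of the original post) contrasts count() with the related countByValue() action, which returns how many times each distinct element occurs instead of a single total. The class name CountByValueSketch and the sample data are assumptions for the example.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;
import java.util.Map;

/**
 * Sketch: count() returns only the total number of elements,
 * while countByValue() returns the number of occurrences of each element.
 */
public class CountByValueSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("countByValue");
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> nameRdd = sc.parallelize(Arrays.asList("w1", "w2", "w2", "w3"));

        // count(): total number of elements -> 4
        System.out.println(nameRdd.count());

        // countByValue(): occurrences per element -> {w1=1, w2=2, w3=1}
        Map<String, Long> counts = nameRdd.countByValue();
        System.out.println(counts);

        sc.stop();
    }
}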