import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

// Approach 3: a named class implementing Spark's Function interface
class Map_test implements Function<String, String> {
    @Override
    public String call(String s) throws Exception {
        return s + " (approach 3)";
    }
}

public class java_spark_function {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("spark_java_function");
        JavaSparkContext sc = new JavaSparkContext(conf);

        List<String> list = Arrays.asList("a", "b", "c", "d");
        JavaRDD<String> parallelize = sc.parallelize(list);

        parallelize.map(
                // Approach 1: a lambda expression
                x -> x + " (approach 1)"
        ).map(
                // Approach 2: an anonymous inner class
                new Function<String, String>() {
                    @Override
                    public String call(String s) throws Exception {
                        return s + " (approach 2)";
                    }
                }
        ).map(
                new Map_test()
        ).collect().forEach(x -> System.out.println(x));

        parallelize.flatMap(
                new FlatMapFunction<String, Integer>() {
                    @Override
                    public Iterator<Integer> call(String s) throws Exception {
                        List<Integer> l = new ArrayList<>();
                        // Compare strings with equals(), not == (== compares references)
                        if ("a".equals(s)) {
                            l.add(1);
                        } else if ("b".equals(s)) {
                            l.add(2);
                        } else {
                            l.add(3);
                        }
                        return l.iterator();
                    }
                }
        ).filter(
                new Function<Integer, Boolean>() {
                    @Override
                    public Boolean call(Integer s) throws Exception {
                        return s < 3; // keep only elements less than 3
                    }
                }
        ).foreach(
                new VoidFunction<Integer>() {
                    @Override
                    public void call(Integer integer) throws Exception {
                        System.out.println("result: " + integer);
                    }
                }
        );

        sc.stop();
    }
}
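Because FlatMapFunction, Function, and VoidFunction each declare a single abstract call method, the flatMap/filter/foreach pipeline above can also be written with lambdas, just like approach 1. Here is a minimal sketch of that shorter form; it assumes Spark 2.x or later (where FlatMapFunction.call returns an Iterator) and reuses the parallelize RDD from the listing above:

        // Same pipeline as above, with lambdas replacing the anonymous inner classes.
        // Drop this into the same main method after parallelize is defined.
        parallelize.flatMap(s -> {
            List<Integer> l = new ArrayList<>();
            if ("a".equals(s)) {
                l.add(1);
            } else if ("b".equals(s)) {
                l.add(2);
            } else {
                l.add(3);
            }
            return l.iterator();
        }).filter(
                i -> i < 3                              // keep only 1 and 2
        ).foreach(
                i -> System.out.println("result: " + i) // runs on the executors
        );

The behavior is identical; the lambda form is simply the idiomatic Java 8+ way to pass these single-method Spark function interfaces.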
I believe that hard-working people won't have bad luck; the future will give me a pair of wings for my dreams.