Spark 二次排序（Secondary Sort）示例：先按第一列排序，第一列相同时再按第二列排序

方式一

public class SecondSortTest {
    /**
     * Driver for the secondary-sort demo: reads space-separated integer pairs
     * from "data/secondSort", keys each line by a composite (first, second)
     * key, sorts by that key, and prints each (key, line) tuple.
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("secondSort");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        JavaRDD<String> lines = jsc.textFile("data/secondSort");

        lines
            // Build the composite sort key from the first two columns of each line.
            .mapToPair((PairFunction<String, SecondSortKey, String>) line -> {
                String[] parts = line.split(" ");
                SecondSortKey key = new SecondSortKey(
                        Integer.parseInt(parts[0]), Integer.parseInt(parts[1]));
                return new Tuple2<>(key, line);
            })
            // sortByKey delegates ordering to SecondSortKey.compareTo.
            .sortByKey()
            .foreach((VoidFunction<Tuple2<SecondSortKey, String>>) pair ->
                    System.out.println(pair));

        jsc.stop();
    }
}

/**
 * Composite sort key for secondary sorting: orders primarily by {@code first},
 * breaking ties with {@code second}.
 *
 * <p>Fixes over the original: comparison now uses {@link Integer#compare(int, int)}
 * instead of subtraction, which overflows and returns the wrong sign for
 * large-magnitude values (e.g. {@code Integer.MIN_VALUE - 1}). The debug
 * {@code System.out.println} calls were removed — a comparator runs O(n log n)
 * times during a sort and must be side-effect free. {@code equals}/{@code hashCode}
 * are provided so equality is consistent with {@code compareTo}.
 */
public class SecondSortKey implements Serializable, Comparable<SecondSortKey> {
    private int first;
    private int second;

    public SecondSortKey(int first, int second) {
        this.first = first;
        this.second = second;
    }

    /** No-arg constructor kept for serialization frameworks. */
    public SecondSortKey() {
    }

    public int getFirst() {
        return first;
    }

    public void setFirst(int first) {
        this.first = first;
    }

    public int getSecond() {
        return second;
    }

    public void setSecond(int second) {
        this.second = second;
    }

    /**
     * Compares by {@code first}, then by {@code second}.
     * Overflow-safe: never computes a raw difference.
     */
    @Override
    public int compareTo(SecondSortKey other) {
        int byFirst = Integer.compare(first, other.first);
        return byFirst != 0 ? byFirst : Integer.compare(second, other.second);
    }

    /** Equality consistent with {@link #compareTo(SecondSortKey)}. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof SecondSortKey)) {
            return false;
        }
        SecondSortKey that = (SecondSortKey) o;
        return first == that.first && second == that.second;
    }

    @Override
    public int hashCode() {
        return 31 * first + second;
    }
}

方式二

/**
 * Scala driver for the secondary-sort demo.
 *
 * Fix: the original declared `main` inside a `class`, which the JVM cannot
 * launch — a Scala entry point must live in an `object` (whose `main` compiles
 * to a static method). Name kept as `secondSort` so any existing references
 * still resolve.
 */
object secondSort {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("secondsort")
    val sc = new SparkContext(conf)

    sc.textFile("data/secondSort")
      .map { line =>
        val cols = line.split(" ")
        // Composite key drives the ordering; the original line is the value.
        (new SecondSortKeyScala(cols(0).toInt, cols(1).toInt), line)
      }
      .sortByKey() // uses SecondSortKeyScala's Ordered[_] instance
      .foreach { case (_, line) => println(line) }

    sc.stop()
  }
}

/**
 * Composite sort key: orders by `first`, breaking ties with `second`.
 *
 * Fix: the original compared via subtraction (`this.first - that.first`),
 * which overflows and yields the wrong sign for large-magnitude values;
 * it also used an explicit `return` inside the method. Both are replaced
 * with overflow-safe `Integer.compare` and a single expression.
 */
class SecondSortKeyScala(val first: Int, val second: Int)
    extends Serializable with Ordered[SecondSortKeyScala] {

  override def compare(that: SecondSortKeyScala): Int = {
    val byFirst = Integer.compare(first, that.first)
    if (byFirst != 0) byFirst else Integer.compare(second, that.second)
  }
}
posted @ 2022-06-20 16:26  jsqup  阅读(16)  评论(0编辑  收藏  举报