统计所有用户对每个学科的各个模块的访问量,取 Top2

package student

import java.net.URL

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}



/**
 * Counts page visits per subject (first label of the URL host, e.g. "java"
 * in java.learn.com) from an access log and prints the top 2 subjects.
 *
 * Input file format per line: "<timestamp><whitespace><url>".
 */
object StuObject {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("stu").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val data: RDD[String] = sc.textFile("D:\\bigdata\\project\\scala2\\src\\main\\scala\\student\\access.txt")
      // Split on any whitespace run: the sample data mixes tabs and single
      // spaces between timestamp and URL, so a literal "\t" split would
      // leave some lines with a single field and crash on fields(1).
      val subjectCounts = data.map { line =>
        val fields = line.split("\\s+")
        val host = new URL(fields(1)).getHost
        (host.split("\\.")(0), 1)  // subject = first host label
      }.reduceByKey(_ + _)
      // NOTE(review): the header says "top 2 modules per subject", but this
      // job only aggregates per subject and takes the global top 2 — confirm
      // which result is actually wanted.
      subjectCounts
        .sortBy(_._2, ascending = false)
        .take(2)                 // Array — foreach works directly, no toBuffer needed
        .foreach(println)
    } finally {
      sc.stop()  // always release the local Spark context, even on failure
    }
  }
}

数据
20161123101523  http://java.learn.com/java/javaee.shtml
20161123101523 http://java.learn.com/java/javaee.shtml
20161123101523 http://ui.learn.com/ui/video.shtml
20161123101523 http://bigdata.learn.com/bigdata/teacher.shtml
20161123101523 http://android.learn.com/android/video.shtml
20161123101523 http://h5.learn.com/h5/teacher.shtml
20161123101523 http://h5.learn.com/h5/course.shtml
20161123101523 http://bigdata.learn.com/bigdata/teacher.shtml
20161123101523 http://java.learn.com/java/video.shtml
20161123101523 http://bigdata.learn.com/bigdata/teacher.shtml
20161123101523 http://ui.learn.com/ui/course.shtml
20161123101523 http://bigdata.learn.com/bigdata/teacher.shtml
20161123101523 http://h5.learn.com/h5/course.shtml
20161123101523 http://java.learn.com/java/video.shtml
20161123101523 http://ui.learn.com/ui/video.shtml
20161123101523 http://h5.learn.com/h5/course.shtml
20161123101523 http://h5.learn.com/h5/teacher.shtml
20161123101523 http://bigdata.learn.com/bigdata/teacher.shtml
20161123101523 http://bigdata.learn.com/bigdata/video.shtml


pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>scala</groupId>
<artifactId>scala</artifactId>
<version>1.0-SNAPSHOT</version>
<!--<properties>-->
<!--<maven.compiler.source>1.8</maven.compiler.source>-->
<!--<maven.compiler.target>1.8</maven.compiler.target>-->
<!--<encoding>UTF-8</encoding>-->
<!--<scala.version>2.11.8</scala.version>-->
<!--</properties>-->

<!--<dependencies>-->
<!--<dependency>-->
<!--<groupId>org.scala-lang</groupId>-->
<!--<artifactId>scala-library</artifactId>-->
<!--<version>${scala.version}</version>-->
<!--</dependency>-->
<!--&lt;!&ndash;<dependency>&ndash;&gt;-->
<!--&lt;!&ndash;<groupId>org.scala-lang</groupId>&ndash;&gt;-->
<!--&lt;!&ndash;<artifactId>scala-actors</artifactId>&ndash;&gt;-->
<!--&lt;!&ndash;<version>2.11.8</version>&ndash;&gt;-->
<!--&lt;!&ndash;</dependency>&ndash;&gt;-->
<!--<dependency>-->
<!--<groupId>com.typesafe.akka</groupId>-->
<!--<artifactId>akka-actor_2.11</artifactId>-->
<!--<version>2.3.14</version>-->
<!--</dependency>-->

<!--<dependency>-->
<!--<groupId>com.typesafe.akka</groupId>-->
<!--<artifactId>akka-remote_2.11</artifactId>-->
<!--<version>2.3.14</version>-->
<!--</dependency>-->
<!--</dependencies>-->
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.10.6</scala.version>
<spark.version>1.6.3</spark.version>
<hadoop.version>2.6.4</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.3.14</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.10</artifactId>
<version>2.3.14</version>
</dependency>

<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>${spark.version}</version>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.38</version>
</dependency>
</dependencies>

<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.2</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
<configuration>
<args>
<arg>-dependencyfile</arg>
<arg>${project.build.directory}/.scala_dependencies</arg>
</args>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>student.StuObject</mainClass>
</transformer>
</transformers>
</configuration> </execution> </executions> </plugin> </plugins> </build></project>


posted @ 2019-02-21 21:05  VIP8cnbl  阅读(244)  评论(0编辑  收藏  举报