通过hadoop上的hive完成WordCount

启动hadoop

# Start all Hadoop daemons (HDFS NameNode/DataNodes and YARN ResourceManager/
# NodeManagers) in one shot. NOTE(review): start-all.sh is deprecated in newer
# Hadoop releases in favor of start-dfs.sh + start-yarn.sh, but still works.
start-all.sh

 

在HDFS上创建文件夹

# Create the input directory under the current user's HDFS home directory
# (resolves to /user/hadoop/wcinput). -p creates missing parent directories
# and makes the command idempotent: no error if the directory already exists,
# so the tutorial can be re-run from the top.
hdfs dfs -mkdir -p wcinput
# Verify the directory was created under the HDFS home directory.
hdfs dfs -ls /user/hadoop

 

上传文件至HDFS

# Upload the local file 1.txt into the HDFS directory wcinput
# (relative path, i.e. /user/hadoop/wcinput/1.txt).
hdfs dfs -put ./1.txt wcinput
# Confirm the upload landed in HDFS.
hdfs dfs -ls /user/hadoop/wcinput

 

启动Hive

# Launch the interactive Hive CLI; all following statements run inside it.
hive

 

创建原始文档表

-- Staging table: one row per raw line of the uploaded text file.
CREATE TABLE docs (line STRING);

 

导入文件内容到表docs并查看

-- NOTE(review): LOAD DATA INPATH *moves* (does not copy) the file from
-- /user/hadoop/wcinput into Hive's warehouse directory, so 1.txt disappears
-- from wcinput afterwards. OVERWRITE replaces any existing rows in docs.
load data inpath '/user/hadoop/wcinput/1.txt' overwrite into table docs;
-- Sanity check: each row should be one line of the original file.
select * from docs;

 

用HiveQL进行词频统计，结果存入表word_count

-- Word-frequency count in HiveQL, materialized into table word_count.
-- SPLIT(line, ' ') turns each line into an array of words; EXPLODE emits
-- one row per array element; the outer query then counts rows per word.
-- Resulting schema: word_count(word STRING, count BIGINT), sorted by word.
-- (The original statement was wrapped mid-keyword "se/lect" by the page
-- extraction and would not parse as printed; the subquery alias is renamed
-- from "word" to "w" to avoid shadowing the column of the same name.)
CREATE TABLE word_count AS
SELECT word, COUNT(1) AS count
FROM (
    SELECT EXPLODE(SPLIT(line, ' ')) AS word
    FROM docs
) w
GROUP BY word
ORDER BY word;

 

查看统计结果

-- Confirm both docs and word_count now exist in the current database.
 show tables;
-- Inspect the final word-frequency results.
select * from word_count;
 posted on 2018-05-17 13:20  Runka  阅读(112)  评论(0编辑  收藏  举报