1.启动hadoop

# Start the HDFS and YARN daemons. start-all.sh is deprecated since
# Hadoop 2.x; the split scripts make it clearer which layer failed to start.
start-dfs.sh
start-yarn.sh

 

2.HDFS上创建文件夹

# Create the input directory. -p creates missing parents (e.g. /user/hadoop),
# so this works even on a fresh cluster where the user home dir doesn't exist.
# The absolute path matches what the later steps (ls/put/LOAD DATA) reference.
hdfs dfs -mkdir -p /user/hadoop/testinput
# Verify the directory was created
hdfs dfs -ls /user/hadoop

 

3.上传文件至HDFS

# Upload the local test.txt into the HDFS testinput directory
# (a relative destination resolves under the user's HDFS home, /user/hadoop)
hdfs dfs -put ./test.txt testinput
# Verify the upload succeeded
hdfs dfs -ls /user/hadoop/testinput

4.启动Hive

    
# Start the interactive Hive CLI
hive

5.创建原始文档表

-- Raw documents table: one row per line of the input text file.
-- (The original line contained a literal "<br>" HTML artifact that fused
-- the two statements into one invalid line; they must be separate.)
create table docs(line string);

-- Confirm the table was created
show tables;

6.导入文件内容到表docs并查看

-- Load the uploaded file into docs. NOTE: LOAD DATA INPATH *moves* the file
-- from its HDFS location into the table's warehouse directory (it does not
-- copy), so /user/hadoop/testinput/test.txt will no longer exist afterwards.
-- OVERWRITE replaces any existing contents of the table.
load data inpath '/user/hadoop/testinput/test.txt' overwrite into table docs;

7.用HQL进行词频统计，结果放在表word_count里

-- Word frequency: split each line on spaces, explode into one word per row,
-- then count occurrences per word.
-- Fixes vs. the original one-liner: the aggregate column is aliased (it was
-- unnamed and surfaced as "_c1"), and the derived-table alias no longer
-- shadows the "word" column name.
create table word_count as
select
    word,
    count(1) as cnt
from (
    select explode(split(line, ' ')) as word
    from docs
) w
group by word;

8.查看统计结果

-- Inspect the word-frequency results (SELECT * is acceptable here for
-- ad-hoc, interactive inspection of a small result table)
select * from word_count;

 

posted on 2018-05-16 09:30  224杨晓潮  阅读(119)  评论(0编辑  收藏  举报