Common Hive Commands and What They Do

  • 1 - Create a table
-- managed (internal) table
create table aa(col1 string,col2 int) partitioned by(statdate int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
-- external table
create external table bb(col1 string, col2 int) partitioned by(statdate int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' location '/user/gaofei.lu/';
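For reference, the drop semantics differ between the two kinds of table: dropping a managed table removes its HDFS data along with the metadata, while dropping an external table only removes the metadata and leaves the files under its location in place.
-- managed table: data is deleted; external table: files stay on HDFS
drop table aa;
drop table bb;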
  • 2 - Show the CREATE TABLE statement for a table
show create table tablename;
  • 3 - Load data into a table
Local file: load data local inpath '/home/gaofei.lu/aa.txt' into table aa partition(statdate=20170403);
File on HDFS: load data inpath '/user/gaofei.lu/aa.txt' into table bb partition(statdate=20170403);
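Loading from HDFS moves the source file into the table's directory, while a local load copies it. To replace any existing data in the partition instead of appending, an overwrite load can be used (a sketch reusing the same sample path as above):
load data local inpath '/home/gaofei.lu/aa.txt' overwrite into table aa partition(statdate=20170403);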
  • 4 - Modify table properties
alter table aa set tblproperties ('EXTERNAL'='TRUE');
alter table bb set tblproperties ('EXTERNAL'='FALSE');
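Whether the change took effect can be verified from the Table Type field reported by describe formatted:
describe formatted aa;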
  • 5 - Modify columns
Rename a column and change its data type: alter table aa change col2 name string;
Move a column to the first position: alter table aa change col2 name string first;
Move a column after a specified column: alter table aa change col1 dept string after name;
  • 6 - Add columns (use with caution)
alter table aa add columns(col3 string);
  • 7 - Rename a table
alter table aa rename to aa_test;
  • 8 - Add a partition
alter table aa add partition(statdate=20170404);
alter table bb add partition(statdate=20170404) location '/user/gaofei.lu/20170404.txt';
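When partition directories already exist on HDFS (for example, written by another job), they can also be registered in bulk instead of one at a time:
-- scans the table's location and adds any partitions missing from the metastore
msck repair table aa;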
  • 9 - Show table partitions
show partitions aa;
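A single partition can also be checked with a filter:
show partitions aa partition(statdate=20170404);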
  • 10 - Modify a partition
alter table aa partition(statdate=20170404) rename to partition(statdate=20170405);
alter table bb partition(statdate=20170404) set location '/user/gaofei.lu/aa.txt';
  • 11 - Drop a partition
alter table aa drop if exists partition(statdate=20170404);
  • 12 - Connect with Beeline
Start the client: beeline
Then connect at the beeline prompt: !connect jdbc:hive2://192.168.1.17:10000
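The JDBC URL (and, if needed, a user name) can also be passed directly on the command line; a sketch assuming the same HiveServer2 address and a placeholder user:
beeline -u jdbc:hive2://192.168.1.17:10000 -n hadoop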
  • 13 - Set Hive on Spark as the execution engine
set hive.execution.engine=spark;
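The setting only applies to the current session; switching back to MapReduce works the same way:
set hive.execution.engine=mr;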
  • 14 - Kill a running job
yarn application -kill application_id
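The application id can be looked up first with:
yarn application -list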
  • 15 - Export query results to a file with a specified delimiter
insert overwrite local directory '/home/hadoop/gaofeilu/test_delimited.txt'
row format delimited
fields terminated by '\t'
select * from test;
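The given path is treated as a directory and Hive writes one or more delimited files into it. Dropping the local keyword exports to HDFS instead; a sketch with a placeholder HDFS path:
insert overwrite directory '/user/gaofei.lu/test_delimited'
row format delimited
fields terminated by '\t'
select * from test;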