Hive的DDL操作

数据库的DDL操作

-- 1. Create a database
create database db_hive;
create database if not exists db_hive; -- no error if it already exists

-- 2. Show all databases
show databases;

-- 3. Filter databases by a name pattern
show databases like 'db_hive*';

-- 4. Show database details ("extended" is optional and prints extra info)
desc database [extended] db_hive;

-- 5. Switch the current database
use db_hive;

-- 6. Drop a database
drop database db_hive;                   -- errors if the database does not exist
drop database if exists db_hive;         -- still errors if the database contains tables
drop database if exists db_hive cascade; -- force-drop a database that contains tables

表的DDL操作

-- 1. CREATE TABLE syntax (clause order matters)
CREATE [EXTERNAL] TABLE [IF NOT EXISTS] table_name
[(col_name data_type [COMMENT col_comment], ...)]
[COMMENT table_comment]
[PARTITIONED BY (col_name data_type [COMMENT col_comment], ...)]
[CLUSTERED BY (col_name, col_name, ...)
  [SORTED BY (col_name [ASC|DESC], ...)] INTO
  num_buckets BUCKETS]
[ROW FORMAT row_format]   -- e.g. row format delimited fields terminated by '<separator>'
[STORED AS file_format]
[LOCATION hdfs_path]

-- EXTERNAL: external table -- dropping it deletes only the metadata, the HDFS data
--           stays; dropping a managed (internal) table deletes metadata AND HDFS data
-- col_name data_type: column name and its type
-- PARTITIONED BY: partitioned table
-- CLUSTERED BY ... SORTED BY ... INTO num_buckets BUCKETS: bucketed table
-- ROW FORMAT delimited fields terminated by '<separator>': field separator
-- STORED AS: storage format for the table, textfile by default
-- LOCATION: HDFS path where the table data is stored

-- 2. Create a managed (internal) table.
--    Default field separator is \001, a non-printing ASCII character.
create table stu(id int, name String);

-- Insert a row. Strongly discouraged: every insert produces lots of small files.
insert into stu(id,name) values(1,"zhangshan");

-- load: bring local (linux) data into the table
load data local inpath 'linuxpath' into table myhive.stu;

-- load from HDFS. "overwrite" replaces the existing files; the load behaves
-- like a move -- the source file is relocated into the table's HDFS path.
load data inpath 'hdfspath' [overwrite] into table myhive.stu;

-- Query the data
select * from stu;

-- Fully spelled-out create: field separator, storage format and HDFS location.
create table if not exists myhive.stu1 (
    id   int,
    name string
)
row format delimited fields terminated by '\t'
stored as textfile
location '/user/stu1';

-- 3. Create a table from a query
create table if not exists myhive.stu2 as select id, name from stu; -- copies the data too
create table if not exists myhive.stu3 like stu; -- copies only stu's schema (empty table)

-- 4. Show table details ("formatted" pretty-prints the full metadata)
desc [formatted] myhive.stu;

-- 5. Create an external table. Dropping an external table does NOT delete
--    its data files on HDFS, only the metadata.
create external table myhive.teacher(
t_id int,
t_name string)
row format delimited fields terminated by '\t';
-- insert works here too, but load is the recommended way to add data.

-- 6. Convert between managed (internal) and external tables.
--    Note: the 'TRUE'/'FALSE' property values are case-sensitive.
alter table stu set tblproperties('EXTERNAL'='TRUE');  -- managed  -> external
alter table stu set tblproperties('EXTERNAL'='FALSE'); -- external -> managed

-- 7. Create partitioned tables

-- Partitioned by month
create table myhive.score(
s_id string,
c_id string,
s_score int)
partitioned by (month string) -- one partition per month
row format delimited fields terminated by '\t';

-- Three-level partitioning: year / month / day
-- (table name was misspelled "socre2"; fixed so the later load into score2 works)
create table myhive.score2(
s_id string,
c_id string,
s_score int)
partitioned by (year string, month string, day string)
row format delimited fields terminated by '\t';

-- Load data into specific partitions
load data local inpath 'linuxpath' into table score partition(month='201912');
load data local inpath 'linuxpath' into table score2 partition(year='2019',month='12',day='12');

-- List a table's partitions
show partitions score;

-- 8. Manage partitions
--    (table name was misspelled "socre" in the add statements; fixed to "score",
--    the table actually created above)
alter table score add partition(month="201911");
-- Add several partitions in one statement
alter table score add partition(month="201910") partition(month="201909");
-- Drop a partition
alter table score drop partition(month="201909");

-- 9. Repair table metadata.
--    Files placed directly in an external table's storage path have no
--    metadata registered; run msck repair before they become queryable.
msck repair table score;
posted @ 2021-03-10 15:44  _蒲公英_  阅读(145)  评论(0编辑  收藏  举报