数据导入:
向表中装载数据
hive> load data [local] inpath '/opt/module/datas/student.txt'
[overwrite] into table student [partition (partcol1=val1,…)];
hive> load data local inpath '/data/hive/student.txt'
into table stu_partition partition(month=20200702);
hive> create table student3(id int,name string)
> row format delimited fields terminated by '\t';
1. 加载本地数据到表:
hive> load data local inpath '/data/hive/student.txt' overwrite into table student3;
hive> load data local inpath '/data/hive/student.txt' into table student3;
2. 加载hdfs数据到表:
上传本地文件到hdfs根目录
(base) [root@big01 hive]# hadoop fs -put student.txt /
hive> load data inpath '/student.txt' into table student3;
insert
hive> insert into table student partition(month='201709') values(1,'wangwu');
hive> insert into table stu2 partition(month=202006,day=26)
> select * from student3;
hive> from student3
> insert into table stu2 partition (month=202006,day=24)
> select *
> insert into table stu2 partition (month=202006,day=28)
> select * ;
create table if not exists student3
as select id, name from student;
创建表时通过Location指定加载数据路径
hive> create table student2 like student3;
hive> dfs -put /data/hive/student.txt /user/hive/warehouse/student2;
hive> select * from student2;
直接利用文件原位置创建外部表:
hive> create EXTERNAL table student4 like student
> location '/';    -- location 必须指向目录(即 student.txt 所在的目录),不能直接指向文件
hive> select * from student4;
数据导出
1. 数据导出到本地
hive> insert overwrite local directory '/data/hive/hstudent1'
> row format delimited fields terminated by '\t'
> select * from student;
2. Hadoop命令导出到本地
hive> dfs -get /user/hive/warehouse/student/month=201709/000000_0
/data/hive/hstudent1;
3. hive 命令导出
$ bin/hive -e 'select * from default.student;' >
/data/hive/hstudent1/student4.txt;
4. export 导出
hive> export table student to '/hstudent2';
hive> import table student5 from '/hstudent2';
5. 数据导出到hdfs(没有local)
hive> insert overwrite directory '/hstudent1'
> row format delimited fields terminated by '\t'
> select * from student;
清除表中数据(Truncate)
Truncate只能删除管理表(内部表)中的数据,不能删除外部表中数据
hive> truncate table student;
网友评论