Document URL
http://www.mashibing.com/hive.html
1. Upload and extract the Hive package
#cd /usr/local
#tar -zxvf apache-hive-2.1.1-bin.tar.gz
#ls
#mv apache-hive-2.1.1-bin hive
2. Configure environment variables (/etc/profile)
#cd hive
#vi /etc/profile
export HADOOP_HOME=/usr/local/hadoop
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:/usr/local/hadoop/bin:/usr/local/hadoop/sbin
export PATH=$PATH:$HIVE_HOME/bin
#source /etc/profile
#echo $HIVE_HOME
#hive    //trial run; this first launch leaves a local Derby metastore_db directory behind in the current directory
#rm -rf m*_db    //delete that metastore_db from the trial run so the schema can be initialized cleanly later
3. Modify the Hive configuration file
#cd conf
#cp hive-default.xml.template hive-site.xml
//open hive-site.xml with Notepad++
Change the value of hive.metastore.schema.verification from true to false
Replace every occurrence of ${system:java.io.tmpdir} with /usr/local/hive/tmm
Replace every occurrence of ${system:user.name} with root
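If you prefer to make the two global substitutions from the shell instead of Notepad++, a rough sketch with GNU sed (assuming hive-site.xml sits in /usr/local/hive/conf as above) could look like the commands below; the hive.metastore.schema.verification value still has to be changed to false by hand, since it sits on its own <value> line:
#sed -i 's#\${system:java\.io\.tmpdir}#/usr/local/hive/tmm#g' hive-site.xml
#sed -i 's#\${system:user\.name}#root#g' hive-site.xml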
#cd ..
#mkdir tmm
#schematool -initSchema -dbType derby    //initialize the Derby metastore schema
4. Start Hive
#hive
>create table wordcount(line string);
#hadoop fs -mkdir /wcinput/
#hadoop fs -put input.txt /wcinput/
>load data inpath '/wcinput/' overwrite into table wordcount;
>desc wordcount;
>select * from wordcount;
>select split(line, ' ') from wordcount;
>select explode(split(line, ' ')) from wordcount;
>select explode(split(line, ' ')) as word from wordcount;
>select word,count(*) as count from (select explode(split(line, ' ')) as word from wordcount) w group by word;
>create table word_counts as select word, count(1) as count from (select explode(split(line, ' ')) as word from wordcount) w group by word order by word;
>select * from word_counts;
#hadoop fs -put ./sougou.dic /
#hadoop fs -ls /
>create table sougou (qtime string, qid string, qword string, url string) row format delimited fields terminated by ',';
>load data inpath '/sougou.dic' into table sougou;
>select count(*) from sougou;
>create table sougou_results as select keyword, count(1) as count from (select qword as keyword from sougou) t group by keyword order by count desc;
>select * from sougou_results limit 10;