Big Data Series 4: Hive – A Data Warehouse on Hadoop

Original post · 2015/04/06 19:07

wget http://mirror.bit.edu.cn/apache/hive/hive-0.11.0/hive-0.11.0-bin.tar.gz

tar -xzvf hive-0.11.0-bin.tar.gz

cd hive-0.11.0-bin

sudo vi /etc/profile

Add the following:

export HIVE_HOME=/home/ysc/hive-0.11.0-bin

export PATH=$PATH:$HIVE_HOME/bin

       source /etc/profile

hadoop fs -mkdir       /tmp

hadoop fs -mkdir       /user/hive/warehouse

hadoop fs -chmod g+w   /tmp

hadoop fs -chmod g+w   /user/hive/warehouse

cp conf/hive-log4j.properties.template conf/hive-log4j.properties

To run jobs in local mode: SET mapred.job.tracker=local;

To use the Hadoop cluster (the default): SET mapred.job.tracker=host001:9001;

Using the Hive service locally:

hive (if the error "Missing Hive Builtins Jar: /home/ysc/hive-0.11.0-bin/lib/hive-builtins-*.jar" appears, reboot the machine with sudo reboot)

Run HiveQL from the command line: create a table, prepare text data, load it, query it.

Create a Hive table:

create table demo (key int, value string) row format delimited fields terminated by '=' stored as textfile;

Load data into the demo table:

load data local inpath '/home/ysc/hive-0.11.0-bin/data.txt' into table demo;
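For the '=' delimiter declared in the table definition to split each row into key and value, data.txt should contain one key=value pair per line. A hypothetical sample (the original does not show the file's contents):

100=hadoop
101=hive
102=pig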

Query:

select * from demo;

select * from demo where key>=100 and key<=120;

select value, count(*) as fre from demo group by value order by fre desc;

Configuring the metastore to use MySQL

       sudo apt-get install mysql-server mysql-client

       GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;
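If the grant does not take effect immediately, flush the privilege tables; note that this account's password must match the one configured in hive-site.xml below (ysc in this walkthrough):

FLUSH PRIVILEGES;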

sudo vi /etc/mysql/my.cnf

Comment out the line bind-address = 127.0.0.1 (so MySQL accepts remote connections)

sudo service mysql restart

       mysql -uroot -pysc

       vi conf/hive-site.xml

with the following content:

<?xmlversion="1.0"?>

<?xml-stylesheettype="text/xsl" href="configuration.xsl"?>

<configuration>

       <!-- use MySQL for the metastore -->

       <property>

         <name>javax.jdo.option.ConnectionURL</name>     

<value>jdbc:mysql://host001:3306/hive?createDatabaseIfNotExist=true</value>

       </property>

       <property>

         <name>javax.jdo.option.ConnectionDriverName</name>

         <value>com.mysql.jdbc.Driver</value>

       </property>

       <property>

         <name>javax.jdo.option.ConnectionUserName</name>

         <value>root</value>

       </property>

       <property>

         <name>javax.jdo.option.ConnectionPassword</name>

         <value>ysc</value>

       </property>

       <!-- Hive Web Interface (HWI) -->

       <property>

         <name>hive.hwi.listen.host</name>

         <value>0.0.0.0</value>

       </property>

       <property>

         <name>hive.hwi.listen.port</name>

         <value>9999</value>

       </property>

       <property>

         <name>hive.hwi.war.file</name>

         <value>lib/hive-hwi-0.11.0.war</value>

       </property>

       <!-- standalone metastore -->

       <property>

         <name>hive.metastore.uris</name>

         <value>thrift://host001:9083</value>

       </property>

</configuration>

Place mysql-connector-java-5.1.18.jar into the hive-0.11.0-bin/lib directory.

Start the standalone metastore service:

       hive --service metastore  &
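Once a Hive command has run against this metastore, you can verify (using the root/ysc MySQL credentials from above) that Hive created its schema tables in the hive database named in the JDBC URL:

mysql -uroot -pysc -e "use hive; show tables;"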

Start the standalone Hive server service:

       hive --service hiveserver &

Use the Hive service remotely:

hive -h host001 -p 10000

Start the Hive Web Interface (HWI) service:

       hive --service hwi &

       http://host001:9999/hwi/

Hive JDBC programming

Add hadoop-core-1.1.2.jar and HIVE_HOME/lib/*.jar to the build path.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

// wrapper class (name arbitrary) added so the snippet compiles
public class HiveJdbcDemo {

   public static void main(String[] args) throws Exception {
      // register the HiveServer1 JDBC driver
      Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
      Connection con = DriverManager.getConnection("jdbc:hive://host001:10000/default");
      String sql = "select * from person";
      PreparedStatement pst = con.prepareStatement(sql);
      ResultSet rs = pst.executeQuery();
      // print the first two columns of each row
      while (rs.next()) {
         System.out.println(rs.getString(1) + " " + rs.getString(2));
      }
   }
}
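Note that this snippet targets the original HiveServer started above with hive --service hiveserver; the HiveServer2 section below uses the org.apache.hive.jdbc.HiveDriver class and a jdbc:hive2:// URL instead.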

HCatalog

sudo vi /etc/profile

Add the following:

export HADOOP_HOME=/home/ysc/hadoop-1.2.1

export HCAT_HOME=/home/ysc/hive-0.11.0-bin/hcatalog

export HCAT_PREFIX=$HCAT_HOME

export METASTORE_PORT=9083

export HCAT_LOG_DIR=/home/ysc/hive-0.11.0-bin/hcatalog/logs

export PATH=$PATH:$HCAT_HOME/bin:$HCAT_HOME/sbin

       source /etc/profile

mkdir /home/ysc/hive-0.11.0-bin/hcatalog/logs

chmod +x /home/ysc/hive-0.11.0-bin/hcatalog/bin/hcat

chmod +x /home/ysc/hive-0.11.0-bin/hcatalog/sbin/*.sh

hcat -e "create table test(id int, value string)"

hcat -e "drop table test"

hcat -e "show tables"

hcat -e "desc test"

hcat_server.sh start & (note: do not run this together with the hive --service metastore & command shown earlier; both start a metastore service)

hcat_server.sh stop

WebHCat (HCatalog REST API)

sudo vi /etc/profile

Add the following:

export HADOOP_CONF_DIR=$HADOOP_HOME/conf

export HADOOP_PREFIX=$HADOOP_HOME

export TEMPLETON_HOME=/home/ysc/hive-0.11.0-bin/hcatalog

       source /etc/profile

hadoop fs -put /home/ysc/hadoop-1.2.1/contrib/streaming/hadoop-streaming-1.2.1.jar /apps/templeton/hadoop-streaming-1.2.1.jar

hadoop fs -put /home/ysc/pig-0.11.1.tar.gz /apps/templeton/pig-0.11.1.tar.gz

hadoop fs -put /home/ysc/hive-0.11.0-bin.tar.gz /apps/templeton/hive-0.11.0-bin.tar.gz

hadoop fs -ls /apps/templeton

vi /home/ysc/hive-0.11.0-bin/hcatalog/etc/webhcat/webhcat-site.xml

with the following content:

<?xmlversion="1.0" encoding="UTF-8"?>

<configuration>

<property>

    <name>templeton.streaming.jar</name>

    <value>hdfs:///apps/templeton/hadoop-streaming-1.2.1.jar</value>

  </property>

  <property>

    <name>templeton.pig.archive</name>

    <value>hdfs:///apps/templeton/pig-0.11.1.tar.gz</value>

  </property>

  <property>

    <name>templeton.pig.path</name>

    <value>pig-0.11.1.tar.gz/pig-0.11.1/bin/pig</value>

  </property>

  <property>

    <name>templeton.hive.archive</name>

    <value>hdfs:///apps/templeton/hive-0.11.0-bin.tar.gz</value>

  </property>

  <property>

    <name>templeton.hive.path</name>

    <value>hive-0.11.0-bin.tar.gz/hive-0.11.0-bin/hive</value>

  </property>

  <property>

    <name>templeton.jar</name>

    <value>${env.TEMPLETON_HOME}/share/webhcat/svr/webhcat-0.11.0.jar</value>

  </property>

  <property>

    <name>templeton.hive.properties</name>

    <value>hive.metastore.local=false,hive.metastore.uris=thrift://host001:9083,hive.metastore.sasl.enabled=false</value>

  </property>

</configuration>

webhcat_server.sh start &

webhcat_server.sh stop

sudo apt-get install curl

curl -i 'http://host001:50111/templeton/v1/status'

curl -i 'http://host001:50111/templeton/v1/ddl/database/default/table/test?user.name=root'

curl -i -d user.name=root \

       -d rename=test2 \

      'http://localhost:50111/templeton/v1/ddl/database/default/table/test'
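WebHCat can also run a Hive query over REST; the call returns a job id and the query executes asynchronously. A minimal sketch, assuming the demo table from earlier and an arbitrary HDFS status directory:

curl -s -d user.name=root \
       --data-urlencode execute="select * from demo;" \
       -d statusdir=/tmp/demo.output \
       'http://host001:50111/templeton/v1/hive'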

Hive commands:

       hive -e 'select * from demo'

 

hive -e 'select * from demo where key < 5'
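A file of HiveQL statements can be run the same way (the script path here is hypothetical):

hive -f /home/ysc/queries.hql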


HiveServer2

 

sudo vi /etc/profile

Add the following:

export HIVE_SERVER2_THRIFT_BIND_HOST=host001

export HIVE_SERVER2_THRIFT_PORT=10002

       source /etc/profile

       hadoop fs -chmod -R 777 /tmp

Start the service: hiveserver2 & (or hive --service hiveserver2 &)

Connect to the service: beeline

beeline> !connect jdbc:hive2://host001:10002 root ysc org.apache.hive.jdbc.HiveDriver

0: jdbc:hive2://host001:10002> show tables;

0: jdbc:hive2://host001:10002> select * from students;

Of course, it can also be invoked from Java via JDBC, as sketched below.
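A minimal sketch (the class name is arbitrary), assuming hadoop-core-1.1.2.jar and HIVE_HOME/lib/*.jar are on the classpath, and reusing the students table and root/ysc credentials from the beeline session above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveServer2JdbcDemo {

   public static void main(String[] args) throws Exception {
      // HiveServer2 uses a different driver class and URL scheme than HiveServer1
      Class.forName("org.apache.hive.jdbc.HiveDriver");
      Connection con = DriverManager.getConnection("jdbc:hive2://host001:10002/default", "root", "ysc");
      Statement st = con.createStatement();
      ResultSet rs = st.executeQuery("select * from students");
      // print the first column of each row
      while (rs.next()) {
         System.out.println(rs.getString(1));
      }
      con.close();
   }
}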

 

 

 

 
