# Download
wget https://dlcdn.apache.org/hive/hive-3.1.3/apache-hive-3.1.3-bin.tar.gz
# Extract
tar -zxvf apache-hive-3.1.3-bin.tar.gz
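The archive unpacks to apache-hive-3.1.3-bin; the paths below assume it has been moved to /home/hadoop/bigdata/hive, so adjust the target if your layout differs:
# Assumed layout: move the extracted directory to the HIVE_HOME used below
mv apache-hive-3.1.3-bin /home/hadoop/bigdata/hive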
vim /etc/profile
export HIVE_HOME=/home/hadoop/bigdata/hive
export PATH=$PATH:$HIVE_HOME/bin
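Reload the profile so the new variables take effect in the current shell, and check that the hive command is now resolvable:
source /etc/profile
echo $HIVE_HOME
which hive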
# Create hive-env.sh from the template under $HIVE_HOME/conf
cd $HIVE_HOME/conf
cp hive-env.sh.template hive-env.sh
vim hive-env.sh
HADOOP_HOME=/home/hadoop/bigdata/hadoop
# Hive Configuration Directory can be controlled by:
# export HIVE_CONF_DIR=
export HIVE_CONF_DIR=/home/hadoop/bigdata/hive/conf
# Configure the metastore (MySQL) and HiveServer2 in hive-site.xml
vim hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://node01:3306/hive?createDatabaseIfNotExist=true</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.cj.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
    <description>username to use against metastore database</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>hive</value>
    <description>password to use against metastore database</description>
  </property>
  <property>
    <name>hive.cli.print.header</name>
    <value>true</value>
    <description>Whether to print the names of the columns in query output.</description>
  </property>
  <property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
    <description>Whether to include the current database in the Hive prompt.</description>
  </property>
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <value>node01</value>
  </property>
  <property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <name>hive.exec.dynamic.partition.mode</name>
    <value>nonstrict</value>
  </property>
</configuration>
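Hive also needs its scratch and warehouse directories on HDFS; the paths below are the usual defaults (adjust them if you override hive.metastore.warehouse.dir):
hdfs dfs -mkdir -p /tmp
hdfs dfs -mkdir -p /user/hive/warehouse
hdfs dfs -chmod g+w /tmp
hdfs dfs -chmod g+w /user/hive/warehouse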
# Download the MySQL JDBC driver into Hive's lib directory so schematool and HiveServer2 can load it
cd $HIVE_HOME/lib
wget https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.30/mysql-connector-java-8.0.30.jar
# Replace the older guava bundled with Hive (e.g. guava-19.0.jar) with Hadoop's newer one to avoid a guava version conflict
rm -f $HIVE_HOME/lib/guava-19.0.jar
cp $HADOOP_HOME/share/hadoop/common/lib/guava-27.0-jre.jar $HIVE_HOME/lib/
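A quick sanity check that only the newer guava now remains in Hive's lib directory:
ls $HIVE_HOME/lib/guava-*.jar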
5. Initialize the metastore database
First, create the Hive user in MySQL and grant it privileges (run in the MySQL shell on node01):
CREATE USER 'hive'@'%' IDENTIFIED BY 'hive';
GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'%';
FLUSH PRIVILEGES;
Then initialize the metastore schema (this connects to MySQL with the settings from hive-site.xml):
schematool -initSchema -dbType mysql
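To confirm the initialization succeeded, schematool can report the schema it just created (it reads the same connection settings from hive-site.xml):
schematool -dbType mysql -info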
# Start HiveServer2 in the background (create the log directory first if it does not exist)
mkdir -p ${HIVE_HOME}/logs
cd ${HIVE_HOME}/bin
nohup ${HIVE_HOME}/bin/hiveserver2 > ${HIVE_HOME}/logs/hiveserver2.log 2>&1 &
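Before connecting, you can verify that HiveServer2 is running and listening on the Thrift port configured above (startup may take a minute or two):
jps | grep RunJar           # HiveServer2 shows up as a RunJar process
ss -lntp | grep 10000       # or: netstat -lntp | grep 10000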
# Normal way: Hive CLI
hive
# Via HiveServer2 (it takes a minute or two after starting the service before logins are accepted)
beeline -u jdbc:hive2://node01:10000/default -n hadoop
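As a quick end-to-end check, beeline can also run a statement non-interactively (any simple query works here):
beeline -u jdbc:hive2://node01:10000/default -n hadoop -e "SHOW DATABASES;"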