1) Prerequisite configuration
Add the following to the Hive configuration file %HIVE_HOME%/conf/hive-site.xml:
<!-- Disable impersonation -->
<property>
  <name>hive.server2.enable.doAs</name>
  <value>false</value>
</property>
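HiveServer2 must be restarted for this change to take effect. One quick way to verify connectivity afterwards (a sketch assuming the HiveServer2 host and port used later in this article) is Beeline:
beeline -u "jdbc:hive2://192.168.31.184:10000/demo" -e "SHOW TABLES;"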
Add the following to both core-site.xml and hdfs-site.xml under %HADOOP_HOME%/etc/hadoop/:
<property>
  <name>hadoop.proxyuser.root.groups</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.root.hosts</name>
  <value>*</value>
</property>
These settings allow root to impersonate other users; make sure nothing else restricts the root user's permissions.
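The Hadoop daemons read the proxy-user settings at startup, so restart them after the change; alternatively, on a running cluster the settings can usually be reloaded in place:
hdfs dfsadmin -refreshSuperUserGroupsConfiguration
yarn rmadmin -refreshSuperUserGroupsConfiguration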
Change the permissions on the warehouse directory of the table person so the connecting user can write to it:
# hdfs dfs -chmod -R 775 /user/hive/warehouse/demo.db/person
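You can then confirm the new mode on the directory itself:
# hdfs dfs -ls -d /user/hive/warehouse/demo.db/person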
Because Hive is a data warehouse rather than a transactional database, row-level UPDATE and DELETE are generally not supported (they require ACID tables). This section therefore only covers how to insert data into Hive and query it from Java.
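The code below assumes a demo.person table shaped roughly like the following (a hypothetical schema inferred from the insert statement in the Java code; column names are illustrative, so adjust them to your actual table):
CREATE TABLE person (
  id      INT,
  name    STRING,
  age     INT,
  likes   ARRAY<STRING>,
  address MAP<STRING, STRING>
);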
2) Code
The pom.xml file:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.jerry</groupId>
  <artifactId>hive</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <description>How to connect to Hive from Java</description>
  <dependencies>
    <!-- Hive JDBC Driver -->
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>3.1.2</version>
    </dependency>
    <!-- Hadoop Common -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>3.2.2</version>
    </dependency>
    <!-- Hadoop Client -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>3.2.2</version>
    </dependency>
  </dependencies>
</project>
The Java file (HiveClient.java):
package com.jerry;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
public class HiveClient {
    private static final String DRIVER_CLASS = "org.apache.hive.jdbc.HiveDriver";
    private static final String CONNECTION_URL = "jdbc:hive2://192.168.31.184:10000/demo";
    private static PreparedStatement preparedStatement;
    private static Statement statement;
    private static ResultSet resultSet;

    // Open a connection
    private Connection getConnection() throws SQLException {
        try {
            Class.forName(DRIVER_CLASS);
            Connection con = DriverManager.getConnection(CONNECTION_URL);
            statement = con.createStatement();
            return con;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            throw new SQLException(e.getMessage());
        }
    }
    // Close the connection and any open resources
    public void disconnect(Connection con) throws SQLException {
        // resultSet is null if no query has been run yet
        if (resultSet != null) {
            resultSet.close();
        }
        statement.close();
        con.close();
    }
    // Execute a query
    public ResultSet query(String query) throws SQLException {
        // Assign to the shared field so disconnect() can close it later
        resultSet = statement.executeQuery(query);
        return resultSet;
    }
    // Print all rows of a result set
    public void printQueryResult(ResultSet resultSet) throws SQLException {
        // Get the ResultSet's metadata
        ResultSetMetaData metaData = resultSet.getMetaData();
        // Number of columns
        int columnCount = metaData.getColumnCount();
        while (resultSet.next()) {
            for (int i = 1; i <= columnCount; i++) {
                System.out.print(resultSet.getString(i) + ",");
            }
            System.out.println();
        }
    }
    // Query and print the data
    public void queryAndPrint(String query) throws SQLException {
        ResultSet resultSet = query(query);
        printQueryResult(resultSet);
    }
    // Insert a row; array and map columns are built with Hive functions
    private void addDataToHiveTable(Connection con, String tableName, String[] newValue, String like, String map) {
        try {
            // HiveQL has no literal syntax for ARRAY/MAP values in a VALUES clause,
            // so the row is built with a SELECT over array()/map() expressions
            String insertSql = "INSERT INTO " + tableName + " SELECT ?,?,?," + like + "," + map;
            preparedStatement = con.prepareStatement(insertSql);
            preparedStatement.setInt(1, Integer.parseInt(newValue[0]));
            preparedStatement.setString(2, newValue[1]);
            preparedStatement.setInt(3, Integer.parseInt(newValue[2]));
            preparedStatement.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) throws SQLException {
        HiveClient hive = new HiveClient();
        String tableName = "person";
        String like = "array('basketball', 'music', 'dance')";
        String map = "map('address','xxxx')";
        String[] newAddValue = {"10", "elite0", "50"};
        Connection con = hive.getConnection();
        String query = "SELECT * FROM " + tableName;
        try {
            hive.queryAndPrint(query);
            hive.addDataToHiveTable(con, tableName, newAddValue, like, map);
            hive.queryAndPrint(query);
            hive.disconnect(con);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Note that map and array (list) values must be constructed with Hive's built-in functions (map(), array()) inside the SQL statement, as in the code above; they cannot be bound as ordinary JDBC parameters.
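For reference, with the sample values used in main, the statement built by addDataToHiveTable is equivalent to the following HiveQL:
INSERT INTO person
SELECT 10, 'elite0', 50,
       array('basketball', 'music', 'dance'),
       map('address', 'xxxx');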