基于之前的hive的基础语法的学习,以下是一个相对完整的练习案例:
第一步:创建基于maven的工程,hivest
第二步:配置pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.gongyunit</groupId>
  <artifactId>hivest</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>
  <name>hivest</name>
  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>1.2.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.6.4</version>
    </dependency>
  </dependencies>
</project>
第三步:在工程中创建包(例如 com.gongyunit.hivest)。第四步:在该包下创建类 HiveJDBCAPI,具体代码如下:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
public class HiveJDBCAPI {
    // Shared JDBC resources, initialized once when the class is loaded.
    // NOTE(review): a static Connection/Statement is fine for this single-threaded
    // tutorial, but it is not thread-safe and must not be shared across threads.
    public static Connection conn = null;
    public static Statement stmt = null;

    static {
        try {
            // Replace 127.0.0.1 with the actual address of the machine running HiveServer2.
            conn = DriverManager.getConnection("jdbc:hive2://127.0.0.1:10000/default", "", "");
            stmt = conn.createStatement();
        } catch (Exception e) {
            // On failure conn/stmt remain null; the null checks in main()'s
            // finally block below guard against the resulting NPE.
            e.printStackTrace();
        }
    }

    /** Creates the demo database. Uses execute() — Hive DDL must not go through executeQuery(). */
    public static void createDataBase() {
        try {
            stmt.execute("create database hivedb");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Drops the demo database. Uses execute() — Hive DDL must not go through executeQuery(). */
    public static void dropDataBase() {
        try {
            stmt.execute("drop database hivedb");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a two-column (name, des) tab-delimited text table.
     * WARNING: tableName is concatenated into the SQL (identifiers cannot be
     * bound as parameters) — pass trusted, validated names only.
     */
    public static void createTable(String tableName) {
        try {
            stmt.execute("create table " + tableName + "( "
                    + "name string, "
                    + "des string "
                    + ")"
                    + " ROW FORMAT DELIMITED"
                    + " FIELDS TERMINATED BY '\t'"
                    + " STORED AS TEXTFILE");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Drops the given table. WARNING: tableName is concatenated — trusted input only. */
    public static void dropTable(String tableName) throws SQLException {
        String sql = "drop table " + tableName;
        stmt.execute(sql);
    }

    /** Prints whether a table with the given name exists. */
    public static void showTables(String tableName) throws SQLException {
        String sql = "show tables '" + tableName + "'";
        // try-with-resources so the ResultSet is always closed (it leaked before).
        try (ResultSet res = stmt.executeQuery(sql)) {
            if (res.next()) {
                System.out.println("table exists: " + res.getString(1));
            } else {
                System.out.println("table not found: " + tableName);
            }
        }
    }

    /** Prints each column name and type of the given table. */
    public static void describeTables(String tableName) throws SQLException {
        String sql = "describe " + tableName;
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        }
    }

    /**
     * Loads a local file into the table via LOAD DATA LOCAL INPATH.
     * The path is resolved on the HiveServer2 machine (e.g. its user's home
     * directory), not on the machine running this client.
     */
    public static void loadData(String tableName, String filepath) throws SQLException {
        String sql = "load data local inpath '" + filepath + "' into table " + tableName;
        stmt.execute(sql);
    }

    /** Prints every row of the table (columns: name, des). */
    public static void selectData(String tableName) throws SQLException {
        String sql = "select * from " + tableName;
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        }
    }

    /** Prints the table's row count using Hive's count(*) aggregate. */
    public static void countData(String tableName) throws SQLException {
        String sql = "select count(*) from " + tableName;
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println("rows: " + res.getString(1));
            }
        }
    }

    /** Prints the rows whose name column equals 'mike2'. */
    public static void selectWhere(String tableName) throws SQLException {
        String sql = "select * FROM " + tableName + " WHERE name = 'mike2'";
        try (ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        }
    }

    public static void main(String args[]) {
        try {
            HiveJDBCAPI.createDataBase();
            // Uncomment individual calls to try the other operations:
            // HiveJDBCAPI.dropDataBase();
            // HiveJDBCAPI.createTable("hivetb");
            // HiveJDBCAPI.createTable("hivetb2");
            // HiveJDBCAPI.dropTable("hivetb");
            // HiveJDBCAPI.showTables("hivetb");
            // HiveJDBCAPI.describeTables("hivetb");
            // HiveJDBCAPI.loadData("hivetb",
            //         "/home/project/soft/hive/apache-hive-1.2.1-bin/bin/data.txt");
            // HiveJDBCAPI.selectData("hivetb");
            // HiveJDBCAPI.countData("hivetb");
            // HiveJDBCAPI.selectWhere("hivetb");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close statement then connection. The null checks fix the NPE the
            // original code hit here whenever the initial connection failed;
            // the original also never closed stmt at all.
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            try {
                if (conn != null) {
                    conn.close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}
第五步:执行main方法。借助eclipse的run as 即可。
领取专属 10元无门槛券
私享最新 技术干货