登录 |  注册 |  繁體中文


java操作hive

分类: 人工智能&大数据 颜色:橙色 默认  字号: 阅读(1425) | 评论(0)

在使用 JDBC 开发 Hive 程序时,  必须首先开启 Hive 的远程服务接口。使用下面命令进行开启:


$HIVE_HOME/bin/hiveserver2

OR

hive --service hiveserver2 &

#注意,1.0版本以后,用的是hiveserver2

hive server 1的driver classname是org.apache.hadoop.hive.jdbc.HiveDriver,

Hive Server 2的是org.apache.hive.jdbc.HiveDriver,这两个容易混淆。

1). 测试数据
userinfo.txt文件内容(每行数据之间用tab键隔开):

1    xiapi
2    xiaoxue
3    qingqing

2). 程序代码


import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

//import org.apache.log4j.Logger;

public class HiveClient {
	// HiveServer2 JDBC driver; HiveServer1 used org.apache.hadoop.hive.jdbc.HiveDriver
	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
	// HiveServer2 URL scheme is jdbc:hive2:// — 注意和hive1的区别 (hive1 was jdbc:hive://)
	private static String url = "jdbc:hive2://192.168.163.110:10000/default";
	private static String user = "hive";
	private static String password = "mysql";

	/**
	 * Demo client: creates a table, loads a local tab-delimited file into it,
	 * then runs describe / select / count queries, printing each result.
	 */
	public static void main(String[] args) {
		try {
			Class.forName(driverName);
		} catch (ClassNotFoundException e) {
			// driver jar is missing from the classpath
			e.printStackTrace();
			System.exit(1);
		}

		// try-with-resources guarantees the connection/statement are closed even
		// when a SQLException is thrown (the original leaked them on error).
		try (Connection conn = DriverManager.getConnection(url, user, password);
				Statement stmt = conn.createStatement()) {

			// 创建的表名
			String tableName = "testHiveDriverTable";

			// 第一步: drop — DDL must go through execute(), not executeQuery()
			// (HiveServer2 rejects executeQuery() for statements that return no
			// ResultSet); "if exists" avoids an error on the very first run.
			stmt.execute("drop table if exists " + tableName);

			// 第二步: create — the original source line was truncated and lost the
			// closing quote; this is the complete statement. Fields are tab-delimited
			// to match userinfo.txt.
			stmt.execute("create table " + tableName
					+ " (key int, value string) row format delimited fields terminated by '\t'");

			// 执行“describe table”操作
			String sql = "describe " + tableName;
			System.out.println("Running:" + sql);
			try (ResultSet res = stmt.executeQuery(sql)) {
				System.out.println("执行“describe table”运行结果:");
				while (res.next()) {
					System.out.println(res.getString(1) + "\t" + res.getString(2));
				}
			}

			// 执行“load data into table”操作 — Hive requires the path to be quoted
			String filepath = "/home/hadoop/ziliao/userinfo.txt";
			sql = "load data local inpath '" + filepath + "' into table " + tableName;
			System.out.println("Running:" + sql);
			stmt.execute(sql);

			// 执行“select * query”操作
			sql = "select * from " + tableName;
			System.out.println("Running:" + sql);
			try (ResultSet res = stmt.executeQuery(sql)) {
				System.out.println("执行“select * query”运行结果:");
				while (res.next()) {
					System.out.println(res.getInt(1) + "\t" + res.getString(2));
				}
			}

			// 执行“regular hive query”操作 — this launches a MapReduce job;
			// 执行错误时,注意看hiveserver2的输出结果
			sql = "select count(1) from " + tableName;
			System.out.println("Running:" + sql);
			try (ResultSet res = stmt.executeQuery(sql)) {
				System.out.println("执行“regular hive query”运行结果:");
				while (res.next()) {
					System.out.println(res.getString(1));
				}
			}
		} catch (SQLException e) {
			e.printStackTrace();
			System.exit(1);
		}
	}
}

3). 运行结果(右击-->Run as-->Run on Hadoop)

    Running:show tables testHiveDriverTable
执行“show tables”运行结果:
testhivedrivertable
Running:describe testHiveDriverTable
执行“describe table”运行结果:
key    int
value    string
Running:load data local inpath /home/hadoop/ziliao/userinfo.txt into table testHiveDriverTable
Running:select * from testHiveDriverTable
执行“select * query”运行结果:
   xiapi
   xiaoxue
   qingqing
Running:select count(1) from testHiveDriverTable
执行“regular hive query”运行结果:

 



上一篇:互联网的合并   下一篇:Hive数据导出三种方式

姓 名: *
邮 箱:
内 容: *
验证码: 点击刷新 *   

回到顶部