This problem is solved.
The JDBC client needs to execute an "add jar" operation to add
"hive_hbase-handler.jar", which is needed by the hive-server to run the
map/reduce job.
The demo hive-hbase integration client code is below:
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
public class HiveJdbcClient {

    /** Fully qualified class name of the Hive JDBC driver. */
    private static String driverName =
            "org.apache.hadoop.hive.jdbc.HiveDriver";

    /**
     * Demo client for the Hive/HBase integration: adds the
     * hive_hbase-handler.jar on the server, (re)creates an HBase-backed
     * Hive table, populates it from an existing Hive table, and runs a
     * few queries against it.
     *
     * @param args unused
     * @throws SQLException if any JDBC operation fails
     */
    public static void main(String[] args) throws SQLException {
        // Make sure the Hive JDBC driver is on the classpath before
        // asking DriverManager for a connection.
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        Connection con = DriverManager.getConnection(
                "jdbc:hive://2.1.37.110:10000/default", "", "");
        Statement stmt = con.createStatement();
        try {
            String tableName = "hive_zsf11";

            // Add the jar the Hive server needs to run the map/reduce job.
            // Use execute(): "add jar" and DDL statements return no result
            // set, so executeQuery() is not valid JDBC for them.
            String sql = "add jar /opt/hadoop/hive/lib/hive_hbase-handler.jar";
            System.out.println("Running: " + sql);
            stmt.execute(sql);

            // Drop the existing table so the create below starts clean.
            System.out.println("drop table: " + tableName);
            stmt.execute("drop table " + tableName);

            // Create the HBase-backed table: Hive column "id" maps to the
            // HBase row key, "name" maps to column cf1:val.
            System.out.println("create table: " + tableName);
            sql = "create table "
                    + tableName
                    + "(id int, name string) STORED BY "
                    + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler' "
                    + "WITH SERDEPROPERTIES "
                    + "('hbase.columns.mapping' = ':key,cf1:val') "
                    + "TBLPROPERTIES ('hbase.table.name' = '"
                    + tableName + "' )";
            System.out.println("Running: " + sql);
            stmt.execute(sql);

            // Verify the table exists.
            sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            ResultSet res = stmt.executeQuery(sql);
            if (res.next()) {
                System.out.println(res.getString(1));
            }

            // Describe the table: one row per column, "name<TAB>type".
            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            while (res.next()) {
                System.out.println(res.getString(1) + "\t"
                        + res.getString(2));
            }

            // Populate from an existing Hive table (runs a map/reduce job
            // on the server).
            String sourceTable = "zsf";
            sql = "INSERT OVERWRITE TABLE " + tableName
                    + " SELECT * FROM " + sourceTable;
            System.out.println("Running: " + sql);
            stmt.execute(sql);

            // Point query on the row key.
            sql = "select * from " + tableName + " where id = 1";
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            while (res.next()) {
                System.out.println(res.getInt(1) + "\t"
                        + res.getString(2));
            }

            // Row count.
            sql = "select count(1) from " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            while (res.next()) {
                System.out.println(res.getString(1));
            }
        } finally {
            // Always release JDBC resources, even if a statement fails.
            stmt.close();
            con.close();
        }
    }
}
The execute result is below:
Hive history
file=/tmp/z00100568/hive_job_log_z00100568_201006171621_541044051.txt
10/06/17 16:21:11 INFO exec.HiveHistory: Hive history
file=/tmp/z00100568/hive_job_log_z00100568_201006171621_541044051.txt
Running: add jar /opt/hadoop/hive/lib/hive_hbase-handler.jar
drop table: hive_zsf11
create table: hive_zsf11
Running: create table hive_zsf11(id int, name string) STORED BY 'org.apache.
hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ('hbase.columns.
mapping' = ':key,cf1:val') TBLPROPERTIES ('hbase.table.name' = 'hive_zsf11'
)
Running: show tables 'hive_zsf11'
hive_zsf11
Running: describe hive_zsf11
id int
name string
Running: INSERT OVERWRITE TABLE hive_zsf11 SELECT * FROM zsf
Running: select * from hive_zsf11 where id = 1
1 zsf
Running: select count(1) from hive_zsf11
4
After this, the Hive server creates an HBase table "hive_zsf11" in HBase and
inserts some data into it.
hbase(main):002:0> scan 'hive_zsf11'
ROW COLUMN+CELL
1 column=cf1:val, timestamp=1276762938488, value=zsf
2 column=cf1:val, timestamp=1276762938488, value=zw
3 column=cf1:val, timestamp=1276762938294, value=zzf
4 column=cf1:val, timestamp=1276762938294, value=cjl
4 row(s) in 0.0160 seconds
hbase(main):003:0>
-----邮件原件-----
发件人: Zhou Shuaifeng [mailto:[email protected]]
发送时间: 2010年6月17日 14:21
收件人: [email protected]
抄送: 'zhaozhifeng 00129982'
主题: hive-hbase integration client error, please help
Hi All ,
I've run into a problem while programming a Hive-HBase client; could someone
help me?
The code is very simple, select some data from a hbase-based table.
/**
 * Selects a single row (id = 1) from an HBase-backed Hive table over
 * JDBC and prints it as "id<TAB>name".
 *
 * @param args unused
 * @throws SQLException if any JDBC operation fails
 */
public static void main(String[] args) throws SQLException {
// Load the Hive JDBC driver; bail out if it is not on the classpath.
try {
Class.forName(driverName);
} catch (ClassNotFoundException notFound) {
notFound.printStackTrace();
System.exit(1);
}
Connection connection = DriverManager.getConnection(
"jdbc:hive://2.1.37.110:10000/default", "", "");
Statement statement = connection.createStatement();
String table = "hive_zsf11";
String query = "select * from " + table + " where id = 1";
System.out.println("Running: " + query);
ResultSet rows = statement.executeQuery(query);
// Print each matching row as "id<TAB>name".
while (rows.next()) {
System.out.println(String.valueOf(rows.getInt(1)) + "\t"
+ rows.getString(2));
}
}
}
The error info is below:
Hive history
file=/tmp/z00100568/hive_job_log_z00100568_201006171106_406425331.txt
10/06/17 11:06:20 INFO exec.HiveHistory: Hive history
file=/tmp/z00100568/hive_job_log_z00100568_201006171106_406425331.txt
Running: select * from hive_zsf11 where id = 1 Exception in thread "main"
java.sql.SQLException: Query returned non-zero code: 9, cause: FAILED:
Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.ExecDriver
at
org.apache.hadoop.hive.jdbc.HiveStatement.executeQuery(HiveStatement.java:19
7)
at com.huawei.hive.HiveJdbcClient.main(HiveJdbcClient.java:69)
Before doing this, I had built the hive-hbase integration code
successfully, and I can run SQL against the HBase-backed table from the
shell without problems.
So, what's more need to do? Thanks a lot.
Best Regards,
Zhou
----------------------------------------------------------------------------
---------------------------------------------------------
This e-mail and its attachments contain confidential information from
HUAWEI, which is intended only for the person or entity whose address is
listed above. Any use of the information contained herein in any way
(including, but not limited to, total or partial disclosure, reproduction,
or dissemination) by persons other than the intended
recipient(s) is prohibited. If you receive this e-mail in error, please
notify the sender by phone or email immediately and delete it!