20,808
社区成员
发帖
与我相关
我的任务
分享
try {
Class.forName("org.apache.hive.jdbc.HiveDriver");
} catch (ClassNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try{
Connection con = DriverManager.getConnection("jdbc:hive2://hadoop.master:10000/default","hadoop","hadoop");
Statement sta = con.createStatement();
ResultSet result = sta.executeQuery("select * from test");
while(result.next()){
System.out.println(result.getString(1));
}
con.close();
} catch(SQLException e) {
e.printStackTrace();
}
15/07/14 09:26:35 INFO jdbc.Utils: Supplied authorities: hadoop.master:10000
15/07/14 09:26:35 INFO jdbc.Utils: Resolved authority: hadoop.master:10000
15/07/14 09:26:35 INFO jdbc.HiveConnection: Will try to open client transport with JDBC Uri: jdbc:hive2://hadoop.master:10000/default
1
1
// Talk directly to the Hive metastore Thrift service and dump every
// database, table and column it knows about.
TTransport transport = null;
try {
    // transport = new TSocket("172.199.10.3", 9083);
    // NOTE: 9083 is the metastore port, not HiveServer2's 10000.
    transport = new TSocket("192.168.159.128", 9083);
    // The metastore speaks the binary Thrift protocol.
    TProtocol protocol = new TBinaryProtocol(transport);
    System.out.println("开始");
    try {
        transport.open();
        System.out.println(transport.isOpen());
    } catch (TTransportException e) {
        // Preserve the original exception as the cause; the original
        // flattened it to getMessage() and lost the stack trace.
        // (Also fixes the "connecton" typo in the message.)
        throw new SQLException("Could not establish connection to: "
                + e.getMessage(), "08S01", e);
    }
    ThriftHive.Client client = new ThriftHive.Client(protocol);
    // Walk databases -> tables -> fields and print each level.
    List<String> dbs = client.get_all_databases();
    for (String db : dbs) {
        System.out.println("数据库:" + db);
        List<String> tbs = client.get_all_tables(db);
        for (String t : tbs) {
            System.out.println(" 表:" + t);
            List<FieldSchema> fields = client.get_fields(db, t);
            for (FieldSchema f : fields) {
                System.out.println(" 字段:" + f.getName());
            }
        }
    }
} catch (Exception x) {
    x.printStackTrace();
} finally {
    // Always release the socket, even when listing fails part-way.
    if (null != transport) {
        try {
            transport.close();
        } catch (Exception e2) {
            e2.printStackTrace();
        }
    }
}
开始
true
数据库:default
表:test
字段:id
表:test2
字段:id
表:test3
字段:id
表:test8
字段:id
List<String> dbs = client.get_all_databases();
// Attempt to run HiveQL ("use default" / "show tables") over a raw Thrift
// connection.
// NOTE(review): ThriftHive.Client.execute() is the old HiveServer1 API; the
// metastore service on port 9083 does not implement it, which is exactly why
// this fails with TApplicationException "Invalid method name: 'execute'".
// To run queries, connect to HiveServer2 (port 10000) via JDBC instead.
TTransport transport = null;
try {
    // transport = new TSocket("172.199.10.3", 9083);
    transport = new TSocket("192.168.159.128", 9083);
    // The service speaks the binary Thrift protocol.
    TProtocol protocol = new TBinaryProtocol(transport);
    System.out.println("开始");
    try {
        transport.open();
        System.out.println(transport.isOpen());
    } catch (TTransportException e) {
        // Preserve the original exception as the cause; the original
        // flattened it to getMessage() and lost the stack trace.
        // (Also fixes the "connecton" typo in the message.)
        throw new SQLException("Could not establish connection to: "
                + e.getMessage(), "08S01", e);
    }
    ThriftHive.Client client = new ThriftHive.Client(protocol);
    client.execute("use default");
    client.execute("show tables");
    // Fetch and print every result row of the last statement.
    List<String> list = client.fetchAll();
    for (String l : list) {
        System.out.println(l);
    }
    System.out.println("OK");
} catch (Exception x) {
    x.printStackTrace();
} finally {
    // Always release the socket, even when execute() fails.
    if (null != transport) {
        try {
            transport.close();
        } catch (Exception e2) {
            e2.printStackTrace();
        }
    }
}
开始
true
org.apache.thrift.TApplicationException: Invalid method name: 'execute'
at org.apache.thrift.TApplicationException.read(TApplicationException.java:111)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:71)
at org.apache.hadoop.hive.service.ThriftHive$Client.recv_execute(ThriftHive.java:116)
at org.apache.hadoop.hive.service.ThriftHive$Client.execute(ThriftHive.java:103)
at hiveDemo.testHiveClient.main(testHiveClient.java:49)