Java querying Hive: unable to fetch query logs (Hive, CDH 5.12.1)

五哥 2018-07-12 11:15:34
package com.cai.test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.List;

import org.apache.hive.jdbc.HiveStatement;

import com.google.common.collect.Lists;

public class HiveJdbcQueryLog {

    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection connection = DriverManager.getConnection(
                "jdbc:hive2://121.7.35.60:10000/;auth=noSasl", "caidm", "");
        // Cast to HiveStatement so hasMoreLogs()/getQueryLog() are available.
        HiveStatement stmt = (HiveStatement) connection.createStatement();

        String sql = "select count(*) from wzw";

        try {
            // Tail the HiveServer2 operation log in a daemon thread while execute() blocks.
            Thread logThread = new Thread(new LogTask(stmt));
            logThread.setDaemon(true);
            logThread.start();
            boolean hasResult = stmt.execute(sql);
            logThread.interrupt();
            if (hasResult) {
                ResultSet resultSet = stmt.getResultSet();
                ResultSetMetaData metaData = resultSet.getMetaData();
                int colCount = metaData.getColumnCount();
                List<String> res = Lists.newArrayList();
                while (resultSet.next()) {
                    List<String> row = Lists.newArrayList();
                    for (int i = 1; i <= colCount; i++) {
                        row.add(resultSet.getString(i));
                    }
                    res.addAll(row);
                }
                System.out.println(res);
                //System.out.println(JSON.toJSONString(res));
            } else {
                stmt.getUpdateCount();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            stmt.close();
            connection.close();
        }
    }

    public static class LogTask implements Runnable {
        private HiveStatement stmt;

        public LogTask(HiveStatement stmt) {
            this.stmt = stmt;
        }

        public void run() {
            try {
                while (stmt.hasMoreLogs()) {
                    try {
                        // Fetch up to 1000 operation-log lines per poll.
                        for (String line : stmt.getQueryLog(false, 1000)) {
                            System.out.println(line);
                        }
                        Thread.sleep(200);
                    } catch (SQLException e) {
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}



Exception output:
18/07/12 11:10:46 INFO jdbc.Utils: Supplied authorities: 10.17.35.60:10000
18/07/12 11:10:46 INFO jdbc.Utils: Resolved authority: 10.17.35.60:10000
org.apache.hive.service.cli.HiveSQLException: Expected state FINISHED, but found RUNNING
at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:241)
at org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java:227)
at org.apache.hive.jdbc.HiveStatement.getQueryLog(HiveStatement.java:817)
at com.cai.test.HiveJdbcQueryLog$LogTask.run(HiveJdbcQueryLog.java:65)
at java.lang.Thread.run(Thread.java:745)
org.apache.hive.service.cli.HiveSQLException: Expected state FINISHED, but found RUNNING
at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:241)
at org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java:227)
at org.apache.hive.jdbc.HiveStatement.getQueryLog(HiveStatement.java:817)
at com.cai.test.HiveJdbcQueryLog$LogTask.run(HiveJdbcQueryLog.java:65)
at java.lang.Thread.run(Thread.java:745)
org.apache.hive.service.cli.HiveSQLException: Expected state FINISHED, but found RUNNING
at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:241)
at org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java:227)
at org.apache.hive.jdbc.HiveStatement.getQueryLog(HiveStatement.java:817)
at com.cai.test.HiveJdbcQueryLog$LogTask.run(HiveJdbcQueryLog.java:65)
at java.lang.Thread.run(Thread.java:745)
0
java.lang.InterruptedException: sleep interrupted
at java.lang.Thread.sleep(Native Method)
at com.cai.test.HiveJdbcQueryLog$LogTask.run(HiveJdbcQueryLog.java:68)
at java.lang.Thread.run(Thread.java:745)
[]
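A note worth adding here: one thing to rule out when getQueryLog() keeps failing with "Expected state FINISHED, but found RUNNING" is whether HiveServer2 has per-operation logging enabled (the hive.server2.logging.operation.enabled property); if it is off, the driver has no operation log to fetch while the query is still running. Whether that is the actual cause in this cluster is only a guess, but it can be checked over the same JDBC connection. A minimal sketch (the class name is made up; "set <property>" is a standard Hive statement that returns one row such as "hive.server2.logging.operation.enabled=true"):

package com.cai.test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical helper class, not part of the original post.
public class CheckOperationLogging {

    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // Same connection settings as the code above; the check itself is generic.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://121.7.35.60:10000/;auth=noSasl", "caidm", "");
             Statement stmt = conn.createStatement();
             // "set <property>" comes back as a single-row result like "property=value".
             ResultSet rs = stmt.executeQuery("set hive.server2.logging.operation.enabled")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

If this prints "...=false", the server is not writing an operation log at all and getQueryLog() has nothing to return while the query runs.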
1 reply
五哥 2018-07-25
package com.cai.test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.List;

import org.apache.hive.jdbc.HiveStatement;

import com.google.common.collect.Lists;

public class HiveJdbcQueryLog {

    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver"); //;auth=noSasl
        Connection connection = DriverManager.getConnection(
                "jdbc:hive2://10.17.35.60:10000/default;auth=noSasl", "caidm", "cdm!@#123aB");
        HiveStatement stmt = (HiveStatement) connection.createStatement();

        String sql = "show tables "; //"select count(*) from cdm_test1";

        try {
            Thread logThread = new Thread(new LogTask(stmt));
            logThread.setDaemon(true);
            logThread.start();
            boolean hasResult = stmt.execute(sql);
            // Prints "false" in the run below: once execute() has returned,
            // the driver no longer reports pending logs.
            System.out.println(stmt.hasMoreLogs());
            logThread.interrupt();
            if (hasResult) {
                ResultSet resultSet = stmt.getResultSet();
                ResultSetMetaData metaData = resultSet.getMetaData();
                int colCount = metaData.getColumnCount();
                List<String> res = Lists.newArrayList();
                while (resultSet.next()) {
                    List<String> row = Lists.newArrayList();
                    for (int i = 1; i <= colCount; i++) {
                        row.add(resultSet.getString(i));
                    }
                    res.addAll(row);
                }
                System.out.println(res);
                //System.out.println(JSON.toJSONString(res));
            } else {
                stmt.getUpdateCount();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            stmt.close();
            connection.close();
        }
    }

    public static class LogTask implements Runnable {
        private HiveStatement stmt;

        public LogTask(HiveStatement stmt) {
            this.stmt = stmt;
        }

        public void run() {
            try {
                while (stmt.hasMoreLogs()) {
                    try {
                        for (String line : stmt.getQueryLog(false, 1000)) {
                            System.out.println(line);
                        }
                        //Thread.sleep(500);
                    } catch (SQLException e) {
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
Result:
log4j:WARN No appenders could be found for logger (org.apache.hive.jdbc.Utils).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
INFO : Compiling command(queryId=hive_20180725104646_0a7d5fd7-9a3d-4e88-8430-e5046c9079bd): show tables
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:tab_name, type:string, comment:from deserializer)], properties:null)
INFO : Completed compiling command(queryId=hive_20180725104646_0a7d5fd7-9a3d-4e88-8430-e5046c9079bd); Time taken: 0.092 seconds
INFO : Executing command(queryId=hive_20180725104646_0a7d5fd7-9a3d-4e88-8430-e5046c9079bd): show tables
INFO : Starting task [Stage-0:DDL] in serial mode
INFO : Completed executing command(queryId=hive_20180725104646_0a7d5fd7-9a3d-4e88-8430-e5046c9079bd); Time taken: 0.067 seconds
INFO : OK
false
[a11, a_agg_graycallcdr_ss, aaa, aaa_201708, aaaa]
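
A follow-up note (not from the thread itself): interrupting the log thread as soon as execute() returns can drop whatever log lines were produced between the last poll and completion, and with the Thread.sleep removed the poll loop spins as fast as it can. One alternative is to keep a short sleep in the daemon thread and then, after execute() has returned, drain whatever is left of the operation log; this is roughly the pattern Beeline uses to print the remaining log once a query finishes. A sketch of such a helper, meant to sit inside HiveJdbcQueryLog and reuse its imports (the method name drainRemainingLogs is made up):

    // Hypothetical helper: after stmt.execute(sql) has returned and before the
    // statement is closed, keep fetching the operation log until nothing new comes back.
    private static void drainRemainingLogs(HiveStatement stmt) {
        try {
            List<String> lines;
            do {
                // incremental = true: fetch only lines not returned by earlier calls
                lines = stmt.getQueryLog(true, 1000);
                for (String line : lines) {
                    System.out.println(line);
                }
            } while (!lines.isEmpty());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

It would be called as drainRemainingLogs(stmt); right after boolean hasResult = stmt.execute(sql); and before stmt.close().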
