// NOTE(review): the six lines below are non-code residue copy-pasted from the web
// page this snippet was taken from (a view counter and site UI labels: "community
// members", "post", "related to me", "my tasks", "share"). Commented out so the
// file compiles; safe to delete.
// 7,388 / 社区成员 / 发帖 / 与我相关 / 我的任务 / 分享
package com.inspur.udf;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
/**
 * Hive UDF-style helper that expands a seed id list into its transitive closure
 * by repeatedly querying a parent/child relation table over JDBC (an emulation
 * of Oracle's {@code START WITH ... CONNECT BY PRIOR}).
 *
 * <p>Thread-safety: stateless; each call opens and closes its own connection.
 */
public class start_prior {

    /**
     * Starting from the ids in {@code para}, repeatedly runs
     * {@code select <column> from <table> where <column_sj> in (<frontier>)}
     * until a query returns no new values, and returns the seed list followed
     * by every value reached, comma-separated.
     *
     * <p>SECURITY NOTE: {@code table}, {@code column} and {@code column_sj} are
     * interpolated directly into the SQL text (identifiers cannot be bound as
     * {@code ?} parameters), so this method must only ever be called with
     * trusted arguments — never with end-user input.
     *
     * @param table     name of the relation table to query (trusted identifier)
     * @param column    child column to select (trusted identifier)
     * @param column_sj parent column matched against the current frontier (trusted identifier)
     * @param para      comma-separated seed value list, inserted verbatim into the IN (...) clause
     * @return {@code para} followed by ",v" for each distinct value reached; just
     *         {@code para} if the first query fails or matches nothing
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     */
    public static String evaluate(String table, String column, String column_sj, String para)
            throws ClassNotFoundException {
        final String driverName = "org.apache.hive.jdbc.HiveDriver";
        final String url = "jdbc:hive2://10.111.30.41:10000/default;principal=hive/indata-10-111-30-41.indata.com@INDATA.COM";

        // Kerberos login; paths are hard-coded to a dev workstation layout.
        // Login failure is best-effort here — the subsequent getConnection will
        // fail and be reported instead.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        try {
            System.setProperty("java.security.krb5.conf", "D:\\workspace\\UDF-MD5\\src\\main\\kerberos\\krb5.conf");
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(
                    "hive/indata-10-111-30-41.indata.com@INDATA.COM",
                    "D:\\workspace\\UDF-MD5\\src\\main\\kerberos\\hive.service.keytab");
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Propagate driver absence instead of swallowing it: the method already
        // declares throws ClassNotFoundException, which the original catch-all hid.
        Class.forName(driverName);

        StringBuilder result = new StringBuilder(para);
        // Cycle guard: values already expanded are never re-queried or re-appended.
        // Without this, any cycle in the hierarchy loops forever (original bug).
        Set<String> seen = new HashSet<>();

        // try-with-resources closes the connection on every exit path; the
        // original leaked conn/stmt/resultSet whenever an exception was thrown,
        // and never closed any PreparedStatement at all.
        try (Connection conn = DriverManager.getConnection(url)) {
            String frontier = para;
            while (frontier != null) {
                String sql = "select " + column + " from " + table
                        + " where " + column_sj + " in (" + frontier + ")";
                System.out.println(sql);
                StringBuilder next = new StringBuilder();
                try (PreparedStatement stmt = conn.prepareStatement(sql);
                     ResultSet resultSet = stmt.executeQuery()) {
                    while (resultSet.next()) {
                        String value = resultSet.getString(1);
                        if (seen.add(value)) { // only unseen values join the output and the next frontier
                            result.append(',').append(value);
                            if (next.length() > 0) {
                                next.append(',');
                            }
                            next.append(value);
                        }
                    }
                }
                // Stop when the last query produced no new values.
                frontier = next.length() == 0 ? null : next.toString();
            }
        } catch (SQLException e) {
            // Preserve the original best-effort contract: report and return what
            // has been accumulated so far rather than failing the whole query.
            e.printStackTrace();
        }
        return result.toString();
    }
}