MapReduce: reading every table in a database

WordCount 2019-01-26 11:36:26
package abcd;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
// Must be the new-API (mapreduce) DBInputFormat; the old
// org.apache.hadoop.mapred.lib.db version cannot be passed to
// Job.setInputFormatClass() and fails to compile there.
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class TablesMR implements DBWritable {

    // One field per column of the source table.
    private String DEVID;
    private String ACCTIME;
    private String IP;
    private String DSTIP;
    private String GETORPOST;
    private String HOST;
    private String URL;
    private String ACCOUNT;
    private String SRC_POSITION;
    private String APPID;
    private String OP_DST;
    private String DST_POSITION;
    private String SRCPORT;
    private String DSTPORT;
    private String mac;

    // DBInputFormat instantiates this class via reflection, so the public
    // no-argument constructor is required.
    public TablesMR() {
    }

    public TablesMR(String DEVID, String ACCTIME, String IP, String DSTIP, String GETORPOST,
                    String HOST, String URL, String ACCOUNT, String SRC_POSITION, String APPID,
                    String OP_DST, String DST_POSITION, String SRCPORT, String DSTPORT, String mac) {
        this.DEVID = DEVID;
        this.ACCTIME = ACCTIME;
        this.IP = IP;
        this.DSTIP = DSTIP;
        this.GETORPOST = GETORPOST;
        this.HOST = HOST;
        this.URL = URL;
        this.ACCOUNT = ACCOUNT;
        this.SRC_POSITION = SRC_POSITION;
        this.APPID = APPID;
        this.OP_DST = OP_DST;
        this.DST_POSITION = DST_POSITION;
        this.SRCPORT = SRCPORT;
        this.DSTPORT = DSTPORT;
        this.mac = mac;
    }

    // DBWritable also covers writing records back to a database, so the output
    // side of the interface works through a PreparedStatement.
    // PreparedStatement extends Statement and executes parameterized
    // (dynamic) SQL.
    @Override
    public void write(PreparedStatement preparedStatement) throws SQLException {
        preparedStatement.setString(1, DEVID);
        preparedStatement.setString(2, ACCTIME);
        preparedStatement.setString(3, IP);
        preparedStatement.setString(4, DSTIP);
        preparedStatement.setString(5, GETORPOST);
        preparedStatement.setString(6, HOST);
        preparedStatement.setString(7, URL);
        preparedStatement.setString(8, ACCOUNT);
        preparedStatement.setString(9, SRC_POSITION);
        preparedStatement.setString(10, APPID);
        preparedStatement.setString(11, OP_DST);
        preparedStatement.setString(12, DST_POSITION);
        preparedStatement.setString(13, SRCPORT);
        preparedStatement.setString(14, DSTPORT);
        preparedStatement.setString(15, mac);
    }

    // On the input side, readFields() pulls one row out of a ResultSet. A
    // ResultSet acts like an in-memory table holding the rows returned by the
    // query.
    @Override
    public void readFields(ResultSet resultSet) throws SQLException {
        DEVID = resultSet.getString("DEVID");
        ACCTIME = resultSet.getString("ACCTIME");
        IP = resultSet.getString("IP");
        DSTIP = resultSet.getString("DSTIP");
        GETORPOST = resultSet.getString("GETORPOST");
        HOST = resultSet.getString("HOST");
        URL = resultSet.getString("URL");
        ACCOUNT = resultSet.getString("ACCOUNT");
        SRC_POSITION = resultSet.getString("SRC_POSITION");
        APPID = resultSet.getString("APPID");
        OP_DST = resultSet.getString("OP_DST");
        DST_POSITION = resultSet.getString("DST_POSITION");
        SRCPORT = resultSet.getString("SRCPORT");
        DSTPORT = resultSet.getString("DSTPORT");
        mac = resultSet.getString("mac");
    }

    @Override
    public String toString() {
        return DEVID + "\t" + ACCTIME + "\t" + IP + "\t" + DSTIP + "\t" +
                GETORPOST + "\t" + HOST + "\t" + URL + "\t" + ACCOUNT + "\t" +
                SRC_POSITION + "\t" + APPID + "\t" + OP_DST + "\t" + DST_POSITION + "\t" +
                SRCPORT + "\t" + DSTPORT + "\t" + mac;
    }

    public static class DataAccessMap extends Mapper<LongWritable, TablesMR, Text, Text> {
        @Override
        protected void map(LongWritable key, TablesMR value, Context context)
                throws IOException, InterruptedException {
            System.out.println(value.toString()); // debug: log each row read
            // Empty key: the reducer only passes rows through, so no grouping
            // key is needed.
            context.write(new Text(), new Text(value.toString()));
        }
    }

    public static class DataAccessReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // Identity reduce: emit every row unchanged.
            for (Text value : values) {
                context.write(key, value);
            }
        }
    }

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        // JDBC connection for MySQL. Connector/J 6.x renamed the driver class
        // to com.mysql.cj.jdbc.Driver; the old com.mysql.jdbc.Driver name is
        // only a deprecated alias.
        DBConfiguration.configureDB(conf, "com.mysql.cj.jdbc.Driver",
                "jdbc:mysql://192.168.100.62:3306/urlevent?useUnicode=true&characterEncoding=utf8",
                "sakai", "sakai");
        Job job = Job.getInstance(conf);
        job.setJarByClass(TablesMR.class);

        // Ship the JDBC driver jar (already on HDFS) to the task classpath.
        // addFileToClassPath is the usual call for a plain jar;
        // addArchiveToClassPath is meant for archives that must be unpacked.
        job.addFileToClassPath(new Path("/zstzjkJar/mysql-connector-java-6.0.6.jar"));
        job.setMapperClass(DataAccessMap.class);
        job.setReducerClass(DataAccessReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setInputFormatClass(DBInputFormat.class);
        // Column names of the input table. These must be separate array
        // elements; the original code joined them with '+', which collapses
        // the whole list into a single bogus field name.
        String[] fields = {"DEVID", "ACCTIME", "IP", "DSTIP",
                "GETORPOST", "HOST", "URL", "ACCOUNT",
                "SRC_POSITION", "APPID", "OP_DST", "DST_POSITION",
                "SRCPORT", "DSTPORT", "mac"};
        // Reads the single table "urlevent" with condition 1=1 and no ordering.
        DBInputFormat.setInput(job, TablesMR.class, "urlevent", "1=1", "", fields);
        FileOutputFormat.setOutputPath(job, new Path("/zstzjkT1"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
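For reference, with the arguments above DBInputFormat ends up issuing a query roughly of the form (the exact text depends on the Hadoop version; on MySQL each map task additionally reads its own LIMIT/OFFSET slice):

    SELECT DEVID, ACCTIME, IP, DSTIP, GETORPOST, HOST, URL, ACCOUNT, SRC_POSITION, APPID, OP_DST, DST_POSITION, SRCPORT, DSTPORT, mac FROM urlevent WHERE 1=1

and the reducers write the rows as tab-separated lines under /zstzjkT1. A quick spot check after the job finishes:

    hdfs dfs -cat /zstzjkT1/part-r-00000 | head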

Right now I can only read a single table. What do I need to do to read all the tables in the whole database? I'd really appreciate any help with this, thanks.
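One way to approach it (a minimal sketch, not a tested solution): a job configured through DBInputFormat.setInput() is bound to exactly one table, so "the whole database" usually means enumerating the tables first and then submitting one job per table. JDBC's DatabaseMetaData.getTables() can produce that list. The driver below is illustrative: MultiTableDriver is a made-up name, the URL, credentials, and jar path are copied from your post, and it assumes every table shares the column layout that TablesMR maps. For tables with different schemas you would need one DBWritable per table, or a generic one that reads columns by index.

package abcd;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MultiTableDriver {

    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://192.168.100.62:3306/urlevent?useUnicode=true&characterEncoding=utf8";
        String user = "sakai";
        String password = "sakai";

        // 1. List every base table in the urlevent database via JDBC metadata.
        List<String> tables = new ArrayList<>();
        Class.forName("com.mysql.cj.jdbc.Driver");
        try (Connection conn = DriverManager.getConnection(url, user, password);
             ResultSet rs = conn.getMetaData()
                     .getTables("urlevent", null, "%", new String[]{"TABLE"})) {
            while (rs.next()) {
                tables.add(rs.getString("TABLE_NAME"));
            }
        }

        // 2. Submit one MapReduce job per table, reusing the existing mapper,
        //    reducer, and DBWritable from TablesMR.
        for (String table : tables) {
            Configuration conf = new Configuration();
            DBConfiguration.configureDB(conf, "com.mysql.cj.jdbc.Driver", url, user, password);

            Job job = Job.getInstance(conf, "read-" + table);
            job.setJarByClass(TablesMR.class);
            job.addFileToClassPath(new Path("/zstzjkJar/mysql-connector-java-6.0.6.jar"));

            job.setMapperClass(TablesMR.DataAccessMap.class);
            job.setReducerClass(TablesMR.DataAccessReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            job.setInputFormatClass(DBInputFormat.class);
            // CAVEAT: TablesMR hard-codes one column layout, so this only
            // works if every table really has these columns.
            DBInputFormat.setInput(job, TablesMR.class, table, "1=1", "",
                    "DEVID", "ACCTIME", "IP", "DSTIP", "GETORPOST", "HOST", "URL",
                    "ACCOUNT", "SRC_POSITION", "APPID", "OP_DST", "DST_POSITION",
                    "SRCPORT", "DSTPORT", "mac");

            FileOutputFormat.setOutputPath(job, new Path("/zstzjkT1/" + table));
            if (!job.waitForCompletion(true)) {
                System.exit(1); // stop at the first table that fails
            }
        }
    }
}

Each table gets its own output directory under /zstzjkT1/ because FileOutputFormat refuses to run against a path that already exists; sequential submission also keeps the load on the MySQL server predictable.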