Java jar built in Eclipse fails on Linux: NoSuchMethodError: org.apache.hadoop.mapreduce.v2.util.MRApps

八维 2017-06-29 04:24:58
I wrote a Java program that imports and cleans log data. It runs fine in Eclipse, but when I package it as a jar and run it on Linux it fails. I have tried many of the fixes I found online and none of them work.
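For reference, the job is submitted with the hadoop jar command (the RunJar frames in the stack trace below confirm this); the invocation is roughly the following, where the jar name and paths are placeholders rather than the exact values used, since run() reads the input and output locations from args[0] and args[1]:

hadoop jar hmbbs.jar hmbbs2.HmbbsC <input-path> <output-path>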
The code is as follows:
package hmbbs2;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class HmbbsC extends Configured implements Tool {

    //static final String INPUT_PATH = "hdfs://hadoopwang:9000/hmbbs_logs/access_2013_05_30.log";
    //static final String OUT_PATH = "hdfs://hadoopwang:9000/hmbbs_cleaned";

    public int run(String[] args) throws Exception {
        @SuppressWarnings("deprecation")
        Job job = new Job(new Configuration(), HmbbsC.class.getSimpleName());
        job.setJarByClass(HmbbsC.class);
        // 1.1 Specify the input file path
        FileInputFormat.setInputPaths(job, args[0]);

        // 1.2 Specify the custom Mapper class
        job.setMapperClass(MyMapper.class);
        // Specify the map output <k2, v2> types
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        // 2.2 Specify the custom Reducer class
        job.setReducerClass(MyReducer.class);
        // Specify the final output <k3, v3> types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // 2.3 Specify the output location
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Submit the job to the JobTracker
        job.waitForCompletion(true);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        ToolRunner.run(new HmbbsC(), args);
    }
    static class MyMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
        LogParser logParser = new LogParser();
        Text v2 = new Text();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, LongWritable, Text>.Context context)
                throws IOException, InterruptedException {
            String[] parsed = logParser.parse(value.toString());

            // Filter out requests for static resources (GET /static/ and /uc_server)
            if (parsed[2].startsWith("GET /static/") || parsed[2].startsWith("GET /uc_server")) {
                return;
            }
            // Strip the trailing " HTTP/1.1" from the request field
            if (parsed[2].endsWith(" HTTP/1.1")) {
                parsed[2] = parsed[2].substring(0, parsed[2].length() - " HTTP/1.1".length());
            }
            // Strip the leading "GET /" or "POST /" from the request field
            if (parsed[2].startsWith("GET /")) {
                parsed[2] = parsed[2].substring("GET /".length());
            } else if (parsed[2].startsWith("POST /")) {
                parsed[2] = parsed[2].substring("POST /".length());
            }

            v2.set(parsed[0] + "\t" + parsed[1] + "\t" + parsed[2]);
            context.write(key, v2);
        }
    }
    static class MyReducer extends Reducer<LongWritable, Text, Text, NullWritable> {
        @Override
        protected void reduce(LongWritable k2, Iterable<Text> v2s,
                Reducer<LongWritable, Text, Text, NullWritable>.Context context) throws IOException, InterruptedException {
            for (Text v2 : v2s) {
                context.write(v2, NullWritable.get());
            }
        }
    }
}
class LogParser {
    public static final SimpleDateFormat FORMAT = new SimpleDateFormat("d/MMM/yyyy:HH:mm:ss", Locale.ENGLISH);
    public static final SimpleDateFormat dateformat1 = new SimpleDateFormat("yyyyMMddHHmmss");

    public static void main(String[] args) throws ParseException {
        final String S1 = "27.19.74.143 - - [30/May/2013:17:38:20 +0800] \"GET /static/image/common/faq.gif HTTP/1.1\" 200 1127";
        LogParser parser = new LogParser();
        final String[] array = parser.parse(S1);
        System.out.println("Sample line: " + S1);
        System.out.format("Parsed: ip=%s, time=%s, url=%s, status=%s, traffic=%s", array[0], array[1], array[2], array[3], array[4]);
    }

    /**
     * Parse the English-locale time string.
     * @param string the raw time string from the log
     * @return the parsed Date, or null if parsing fails
     */
    private Date parseDateFormat(String string) {
        Date parse = null;
        try {
            parse = FORMAT.parse(string);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return parse;
    }

    /**
     * Parse one line of the log.
     * @param line one log record
     * @return an array of 5 elements: ip, time, url, status, traffic
     */
    public String[] parse(String line) {
        String ip = parseIP(line);
        String time = parseTime(line);
        String url = parseURL(line);
        String status = parseStatus(line);
        String traffic = parseTraffic(line);

        return new String[]{ip, time, url, status, traffic};
    }

    private String parseTraffic(String line) {
        final String trim = line.substring(line.lastIndexOf("\"") + 1).trim();
        String traffic = trim.split(" ")[1];
        return traffic;
    }

    private String parseStatus(String line) {
        final String trim = line.substring(line.lastIndexOf("\"") + 1).trim();
        String status = trim.split(" ")[0];
        return status;
    }

    private String parseURL(String line) {
        final int first = line.indexOf("\"");
        final int last = line.lastIndexOf("\"");
        String url = line.substring(first + 1, last);
        return url;
    }

    private String parseTime(String line) {
        final int first = line.indexOf("[");
        final int last = line.indexOf("+0800]");
        String time = line.substring(first + 1, last).trim();
        Date date = parseDateFormat(time);
        return dateformat1.format(date);
    }

    private String parseIP(String line) {
        String ip = line.split("- -")[0].trim();
        return ip;
    }

}
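For reference, running LogParser's main() on the sample line embedded above should print roughly the following (derived by tracing the parse methods by hand, so treat it as an expected sketch rather than a captured run):

Sample line: 27.19.74.143 - - [30/May/2013:17:38:20 +0800] "GET /static/image/common/faq.gif HTTP/1.1" 200 1127
Parsed: ip=27.19.74.143, time=20130530173820, url=GET /static/image/common/faq.gif HTTP/1.1, status=200, traffic=1127

Note that this particular line requests a static resource, so MyMapper's "GET /static/" filter would drop it before it ever reaches the reducer.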


The runtime error is:
17/06/29 10:00:38 INFO client.RMProxy: Connecting to ResourceManager at /192.168.28.10:8032
17/06/29 10:00:39 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
17/06/29 10:00:39 INFO input.FileInputFormat: Total input paths to process : 1
17/06/29 10:00:39 INFO mapreduce.JobSubmitter: number of splits:1
17/06/29 10:00:39 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1498738835275_0023
17/06/29 10:00:39 INFO mapreduce.JobSubmitter: Cleaning up the staging area /tmp/hadoop-yarn/staging/root/.staging/job_1498738835275_0023
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.mapreduce.v2.util.MRApps.crossPlatformifyMREnv(Lorg/apache/hadoop/conf/Configuration;Lorg/apache/hadoop/yarn/api/ApplicationConstants$Environment;)Ljava/lang/String;
at org.apache.hadoop.mapred.YARNRunner.createApplicationSubmissionContext(YARNRunner.java:392)
at org.apache.hadoop.mapred.YARNRunner.submitJob(YARNRunner.java:285)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:240)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
at hmbbs.HmbbsCleaner.main(HmbbsCleaner.java:52)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
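
For what it's worth: a NoSuchMethodError thrown at runtime (rather than a compile error in Eclipse) means the JVM loaded a version of MRApps that lacks crossPlatformifyMREnv, which typically points to a mismatch between the Hadoop jars the program was compiled and packaged against in Eclipse and the Hadoop version installed on the cluster. Below is a minimal diagnostic sketch, assuming it is run with the same classpath as the job (the class name WhichJar is made up for illustration); it prints which jar MRApps is actually loaded from, so the on-cluster version can be compared with the one Eclipse used:

import java.security.CodeSource;

// Hypothetical helper, not part of the original program: locates the jar
// that org.apache.hadoop.mapreduce.v2.util.MRApps is loaded from at runtime.
public class WhichJar {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> c = Class.forName("org.apache.hadoop.mapreduce.v2.util.MRApps");
        CodeSource src = c.getProtectionDomain().getCodeSource();
        // CodeSource can be null for classes on the bootstrap classpath.
        System.out.println(src == null ? "bootstrap classpath" : src.getLocation());
    }
}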


I have been searching for a solution for a whole day and the problem is still there. Any advice would be appreciated.