I wrote a custom MyWritable; when I run it in Eclipse it says it cannot find the input file path, even though the path is correct. But after importing the teacher's project, mine runs.

crazy_tiddler 2017-09-07 10:22:40
I wrote a custom MyWritable.
When I run it in Eclipse, it reports that it cannot find the input file path, even though the path is correct.
What's more, once I import the teacher's project, my project runs; if I delete the teacher's project, my project again fails with the "cannot find input file" error. Where could the problem be? My project has every system configuration file that the teacher's project has; they were copied straight from his project, so there should be no difference.
The error reported:
Exception in thread "main" org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: file:/xiaoyu/b
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:323)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:265)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:387)
at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:304)
at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:321)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:199)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1307)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1304)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1304)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1325)
at com.vdata.check.MyDriver.run(MyDriver.java:50)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
at com.vdata.check.MyDriver.main(MyDriver.java:56)
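
The scheme in the message is the telling part: Hadoop is resolving /xiaoyu/b against the local filesystem (file:), not HDFS, which usually means the Configuration the job runs with never picked up fs.defaultFS from a core-site.xml on the classpath. A minimal sketch of ruling this out by setting the default filesystem explicitly in the driver (the NameNode address is a placeholder, not from the original post):

    // Hypothetical sketch: force the default filesystem in code instead of
    // relying on core-site.xml being visible on the Eclipse classpath.
    Configuration conf = getConf();
    conf.set("fs.defaultFS", "hdfs://master:9000"); // placeholder NameNode URI
    Job job = Job.getInstance(conf);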

Partial code:

package com.vdata.check;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


public class MyDriver extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.out.println("Wrong number of arguments: " + otherArgs.length);
            return -1;
        }

        String hdfsInPath = otherArgs[0];  // input file path on HDFS
        String hdfsOutPath = otherArgs[1]; // output path

        Job job = Job.getInstance();
        job.setJarByClass(MyWritable.class);
        job.setJar("e://MyWritable.jar");

        job.setJobName("qweqwe");

        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(hdfsInPath));
        FileOutputFormat.setOutputPath(job, new Path(hdfsOutPath));

        int status = job.waitForCompletion(true) ? 0 : 1;
        return status;
    }

    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "root");
        int status = ToolRunner.run(new MyDriver(), args);
        System.exit(status);
    }

}
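
One thing stands out in the driver above: Job.getInstance() is called with no arguments, so the Configuration that ToolRunner and GenericOptionsParser prepared (the one returned by getConf()) is discarded, and the job builds a fresh Configuration from whatever *-site.xml files happen to be on the classpath. A minimal sketch of the conventional Tool pattern, assuming the rest of the driver stays unchanged:

    // Sketch: hand the Tool's Configuration to the job so that -D options,
    // -conf files and classpath core-site.xml settings actually reach it.
    Job job = Job.getInstance(getConf(), "qweqwe");
    job.setJarByClass(MyWritable.class);
    // ... mapper/reducer and key/value class setup as in the original ...

If the teacher's project carried the cluster's core-site.xml on its classpath, that would also explain the symptom: importing it lets the default Configuration find fs.defaultFS, and deleting it brings the file:/ resolution (and the error) back.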





package com.vdata.check;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

public class MyWritable implements WritableComparable<MyWritable> {

    private Text key;
    private LongWritable num;

    public MyWritable() {
        this(new Text(), new LongWritable());
    }

    public MyWritable(Text key, LongWritable num) {
        this.key = key;
        this.num = num;
    }

    public Text getKey() {
        return key;
    }

    public void setKey(Text key) {
        this.key = key;
    }

    public LongWritable getNum() {
        return num;
    }

    public void setNum(LongWritable num) {
        this.num = num;
    }

    @Override
    public void readFields(DataInput input) throws IOException {
        key.readFields(input);
        num.readFields(input);
    }

    @Override
    public void write(DataOutput output) throws IOException {
        key.write(output);
        num.write(output);
    }

    @Override
    public int compareTo(MyWritable obj) {
        // Order by key first, then by num; per the compareTo contract a
        // null argument throws NullPointerException rather than comparing
        // as equal.
        int status = this.key.compareTo(obj.getKey());
        if (status != 0) {
            return status;
        }
        return this.num.compareTo(obj.getNum());
    }

}
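
Unrelated to the path error, but worth noting for a custom key type: if MyWritable were used as a map output key, the default HashPartitioner distributes records by hashCode(), so hashCode() and equals() should be overridden consistently with compareTo(). A sketch (the multiplier 163 is just a common convention, not anything from the original code):

    @Override
    public int hashCode() {
        // Combine both fields so that equal objects hash alike.
        return key.hashCode() * 163 + num.hashCode();
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof MyWritable)) {
            return false;
        }
        MyWritable other = (MyWritable) o;
        return key.equals(other.key) && num.equals(other.num);
    }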
