How do I fix the Hadoop sort error java.lang.NumberFormatException: For input string: ""?

sen_lin8350 2013-05-14 11:53:43
import java.io.IOException;
//import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class Sort {
    public static class Map extends Mapper<Object, Text, IntWritable, IntWritable> {
        private static IntWritable data = new IntWritable();

        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            System.out.println(line);
            if (line != "") {
                data.set(Integer.parseInt(line));
                context.write(data, new IntWritable(1));
            }
        }
    }

    public static class Reduce extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
        private static IntWritable linenum = new IntWritable(1);

        public void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            for (IntWritable val : values) {
                context.write(linenum, key);
                linenum = new IntWritable(linenum.get() + 1);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: wordcount <in> <out>");
            System.exit(2);
        }

        Job job = new Job(conf, "Sort");
        job.setJarByClass(Sort.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}


13/05/14 11:45:36 WARN mapred.LocalJobRunner: job_local_0001
java.lang.NumberFormatException: For input string: ""
at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
at java.lang.Integer.parseInt(Integer.java:493)
at java.lang.Integer.parseInt(Integer.java:514)
at Sort$Map.map(Sort.java:23)
at Sort$Map.map(Sort.java:1)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:144)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:621)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:305)
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:177)
13/05/14 11:45:37 INFO mapred.JobClient: map 0% reduce 0%
13/05/14 11:45:37 INFO mapred.JobClient: Job complete: job_local_0001
13/05/14 11:45:37 INFO mapred.JobClient: Counters: 0

The input data is:
1)file1:
2
32
654
32
15
756
65223
2)file2:
5956
22
650
92
3)file3:
26
54
6

3 replies
sbwfgihc 2014-04-26
Quoting reply #2 by sen_lin8350:
It works now, thanks!
How did you solve it?
撸大湿 2013-05-14
I haven't looked through the rest of the code yet, but there is one obvious error: Java strings must not be compared with == or !=; use the equals method instead. value.toString() returns a new String object, so line != "" compares references and is always true, which lets empty input lines fall through to Integer.parseInt("") and throw the NumberFormatException. The corrected map method:

public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
    String line = value.toString();
    System.out.println(line);
    if (!line.equals("")) { // Java strings must be compared with equals(), not == or !=
        data.set(Integer.parseInt(line));
        context.write(data, new IntWritable(1));
    }
}
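
A slightly more defensive variant of that mapper (a sketch, not part of the original fix): trimming the line and skipping anything that is empty after the trim also guards against whitespace-only lines, which !line.equals("") would still pass on to Integer.parseInt. The class name SortMapper and the trim/isEmpty check are illustrative assumptions; the rest mirrors the poster's code.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Sketch: skip blank and whitespace-only lines before parsing,
// so neither "" nor "   " ever reaches Integer.parseInt.
public class SortMapper extends Mapper<Object, Text, IntWritable, IntWritable> {
    private static final IntWritable data = new IntWritable();
    private static final IntWritable one = new IntWritable(1);

    @Override
    public void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString().trim();   // drop leading/trailing whitespace
        if (!line.isEmpty()) {                   // skip empty and whitespace-only lines
            data.set(Integer.parseInt(line));    // line now has at least one character
            context.write(data, one);
        }
    }
}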
sen_lin8350 2013-05-14
It works now, thanks!
