java.lang.ClassCastException: org.apache.hadoop.hbase.io.ImmutableBytesWritable

anakinli 2014-03-11 05:29:55
I set up an environment on Linux with Hadoop 2.2 + HBase 0.98. The MapReduce examples that ship with Hadoop run fine, but a MapReduce job I wrote that uses HBase as both input and output fails with the error below. The source code is listed after the exception; could someone familiar with this help me out?

I compile it into a jar with Eclipse + Maven on Windows and run the jar on the Linux server.

The exception is:
java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.hbase.io.ImmutableBytesWritable cannot be cast to org.apache.hadoop.hbase.client.Mutation
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:403)
Caused by: java.lang.ClassCastException: org.apache.hadoop.hbase.io.ImmutableBytesWritable cannot be cast to org.apache.hadoop.hbase.client.Mutation
at org.apache.hadoop.hbase.mapreduce.TableOutputFormat$TableRecordWriter.write(TableOutputFormat.java:87)
at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:634)
at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
at pchour.PCHourMapper.map(PCHourMapper.java:20)
at pchour.PCHourMapper.map(PCHourMapper.java:11)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:763)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:235)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
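
For context on the cast itself: TableOutputFormat's record writer hands every value it receives to HBase as a Mutation, so whichever stage writes to it last (the reducer, or the mapper in a job with zero reduce tasks) has to emit Put or Delete objects. The MapTask$NewDirectOutputCollector frame in the trace is the path Hadoop takes only when a job has no reduce tasks, which means the mapper's ImmutableBytesWritable values were being handed straight to TableOutputFormat. A minimal sketch (hypothetical class and qualifier names, not from the original post) of a map-only mapper that satisfies that contract:

package pchour;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical example: in a job with zero reducers the mapper itself must emit
// a Mutation (Put/Delete), because TableOutputFormat receives the map output directly.
public class PutEmittingMapper extends TableMapper<ImmutableBytesWritable, Put> {

    @Override
    protected void map(ImmutableBytesWritable row, Result values, Context context)
            throws IOException, InterruptedException {
        byte[] cityId = values.getValue(Bytes.toBytes("m"), Bytes.toBytes("city_id"));
        if (cityId != null) {
            Put put = new Put(row.get());
            // "city_id_copy" is a made-up qualifier, used here only for illustration.
            put.add(Bytes.toBytes("m"), Bytes.toBytes("city_id_copy"), cityId);
            context.write(row, put);   // the value is a Put, so the Mutation cast succeeds
        }
    }
}

In this thread's job the intent is clearly map plus reduce, though, so the more likely fix is in the job setup rather than the mapper; see the notes after PCHourAnalysis.java below.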

PCHourMapper.java

package pchour;

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;

public class PCHourMapper extends TableMapper<ImmutableBytesWritable, ImmutableBytesWritable> {

    public void map(ImmutableBytesWritable row, Result values, Context context)
            throws IOException, InterruptedException {
        ImmutableBytesWritable value = null;
        ImmutableBytesWritable key = null;

        for (KeyValue kv : values.list()) {
            if ("m".equals(Bytes.toString(kv.getFamily()))
                    && "city_id".equals(Bytes.toString(kv.getQualifier()))) {
                key = row;
                value = new ImmutableBytesWritable(kv.getValue());
                context.write(key, value);
            }
        }
    }
}


PCHourReducer.java

package pchour;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;

public class PCHourReducer extends TableReducer<ImmutableBytesWritable, Iterable<ImmutableBytesWritable>, ImmutableBytesWritable> {

    public void reduce(ImmutableBytesWritable key, Iterable<ImmutableBytesWritable> values, Context context)
            throws IOException, InterruptedException {
        int count = 0;
        for (ImmutableBytesWritable val : values) {
            count += Bytes.toInt(val.get());
        }

        Put put = new Put(key.get());

        put.add(Bytes.toBytes("m"), Bytes.toBytes("city_id_all"), Bytes.toBytes(count));

        context.write(key, put);
    }
}
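
A side note on the reducer as written: TableReducer's type parameters are <KEYIN, VALUEIN, KEYOUT>, where VALUEIN is the type of a single value and the framework supplies the Iterable. Declaring VALUEIN as Iterable<ImmutableBytesWritable> means the reduce method above does not match the base-class signature (which would then expect Iterable<Iterable<ImmutableBytesWritable>>), so it is not a clean override. A sketch of the conventional declaration with the same logic, using a renamed class to keep it distinct from the original:

package pchour;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch: VALUEIN is ImmutableBytesWritable; reduce() then receives Iterable<VALUEIN>.
public class PCHourSumReducer
        extends TableReducer<ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable> {

    @Override
    protected void reduce(ImmutableBytesWritable key, Iterable<ImmutableBytesWritable> values, Context context)
            throws IOException, InterruptedException {
        int count = 0;
        for (ImmutableBytesWritable val : values) {
            count += Bytes.toInt(val.get());   // assumes the stored values are 4-byte ints
        }

        Put put = new Put(key.get());
        put.add(Bytes.toBytes("m"), Bytes.toBytes("city_id_all"), Bytes.toBytes(count));
        context.write(key, put);   // the reducer emits a Put, which TableOutputFormat accepts
    }
}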


PCHourAnalysis.java

package pchour;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class PCHourAnalysis {
    protected Configuration configure;
    protected int taskCount;

    public PCHourAnalysis(String zookeeperClientPort, String zookeeperQuoRum, int taskCount) {
        configure = HBaseConfiguration.create();
        configure.set("hbase.zookeeper.property.clientPort", zookeeperClientPort);
        configure.set("hbase.zookeeper.quorum", zookeeperQuoRum);
    }

    public int analysis() {
        int result = 0;

        Job job;
        try {
            job = new Job(configure);

            job.setJobName("pc hour analysis job");

            job.setJarByClass(getClass());
            job.setMapperClass(PCHourMapper.class);
            job.setReducerClass(PCHourReducer.class);

        } catch (IOException e) {
            result = -1;

            return result;
        }

        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        try {
            TableMapReduceUtil.initTableMapperJob("newnetworkbenchSpeedDataRaw", scan,
                    PCHourMapper.class, ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);
            TableMapReduceUtil.initTableReducerJob("newnetworkbenchhourresult", PCHourReducer.class, job);
        } catch (IOException e) {
            result = -2;

            return result;
        }

        job.setNumReduceTasks(taskCount);

        try {
            if (!job.waitForCompletion(true)) {
                result = -3;
            }
        } catch (ClassNotFoundException | IOException | InterruptedException e) {
            result = -4;
        }

        return result;
    }
}
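
One detail in PCHourAnalysis stands out against the stack trace: the constructor takes taskCount but never assigns it to the field, so this.taskCount keeps its default of 0 and job.setNumReduceTasks(0) turns this into a map-only job. With zero reduce tasks the map output goes straight to TableOutputFormat (the NewDirectOutputCollector path shown in the trace), where the mapper's ImmutableBytesWritable values fail the cast to Mutation. A sketch of the corrected wiring, under the assumption that this is the cause (class renamed here to keep it separate from the original, and using the PCHourSumReducer sketch above):

package pchour;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

// Sketch: store taskCount so setNumReduceTasks() gets a value > 0 and the reduce phase runs.
public class PCHourAnalysisSketch {
    protected Configuration configure;
    protected int taskCount;

    public PCHourAnalysisSketch(String zookeeperClientPort, String zookeeperQuorum, int taskCount) {
        configure = HBaseConfiguration.create();
        configure.set("hbase.zookeeper.property.clientPort", zookeeperClientPort);
        configure.set("hbase.zookeeper.quorum", zookeeperQuorum);
        this.taskCount = taskCount;   // missing in the original; without it the field stays 0
    }

    public int analysis() {
        try {
            Job job = new Job(configure, "pc hour analysis job");
            job.setJarByClass(getClass());

            Scan scan = new Scan();
            scan.setCaching(500);
            scan.setCacheBlocks(false);

            // These helpers also set the mapper and reducer classes, so the separate
            // setMapperClass/setReducerClass calls are not strictly needed.
            TableMapReduceUtil.initTableMapperJob("newnetworkbenchSpeedDataRaw", scan,
                    PCHourMapper.class, ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);
            TableMapReduceUtil.initTableReducerJob("newnetworkbenchhourresult", PCHourSumReducer.class, job);

            job.setNumReduceTasks(taskCount);   // now > 0, so map output goes to the reducer

            return job.waitForCompletion(true) ? 0 : -3;
        } catch (IOException | ClassNotFoundException | InterruptedException e) {
            return -1;
        }
    }
}

This would need to be paired with a reducer whose declaration actually overrides reduce(), as sketched after PCHourReducer.java above; otherwise the identity reducer writes ImmutableBytesWritable values into TableOutputFormat and the same cast fails in the reduce phase instead.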


PCHour.java

package pchour;

public class PCHour {

    public static void main(String[] args) {
        String zookeeperClientPort = "8023";
        String zookeeperQuoRum = "tc-op-uaq07.tc,yf-uaq-jsl-ruby00.yf01,tc-uaq-jsl-ruby00.tc,tc-op-uaq17.tc";

        PCHourAnalysis hourAnalysis = new PCHourAnalysis(zookeeperClientPort, zookeeperQuoRum, 8);
        hourAnalysis.analysis();
    }
}
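
One more observation from the trace: the LocalJobRunner frames mean the job was executed by the local runner inside the client JVM rather than submitted to the cluster. If the intent is to run on the Hadoop 2 cluster, the client Configuration usually needs the cluster's *-site.xml files (or the equivalent properties) on its classpath. A rough sketch, with assumed file locations:

package pchour;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

// Hypothetical helper: build a Configuration that points the MapReduce client at the
// cluster instead of the LocalJobRunner. The file paths below are assumptions.
public class ClusterConf {
    public static Configuration create() {
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hadoop/conf/mapred-site.xml"));
        conf.addResource(new Path("/etc/hadoop/conf/yarn-site.xml"));
        // On Hadoop 2.x this selects the YARN runtime rather than the local runner.
        conf.set("mapreduce.framework.name", "yarn");
        return conf;
    }
}

This does not cause the ClassCastException by itself, but it explains why the failure shows up in LocalJobRunner rather than in a YARN container.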
3 replies
anakinli 2014-03-12
Solved. It turned out to be a component problem: I downloaded the Hadoop and HBase source, built them myself, and now it works.
anakinli 2014-03-11
Quoting reply #1 from tntzbzc:
Your code should be fine; at least it runs in my hbase 0.94 + hadoop 1.0.4 test environment. 0.98 is too bleeding-edge, I haven't tried it yet.
I suspect it's a jar dependency problem. I build with Maven; could you list your build dependencies?
撸大湿 2014-03-11
Your code should be fine; at least it runs in my hbase 0.94 + hadoop 1.0.4 test environment. 0.98 is too bleeding-edge, I haven't tried it yet.
