Exception in thread "main" java.lang.IllegalArgumentException: Pathname /E:/workspace_hadoop/zTestHBase/hbasejars/protobuf-java-2.5.0.jar from hdfs://172.17.40.250:9000/E:/workspace_hadoop/zTestHBase/hbasejars/protobuf-java-2.5.0.jar is not a valid DFS filename.
    at org.apache.hadoop.hdfs.DistributedFileSystem.getPathName(DistributedFileSystem.java:184)
    at org.apache.hadoop.hdfs.DistributedFileSystem.access$000(DistributedFileSystem.java:92)
    at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1106)
    at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1102)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1102)
    at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:288)
    at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:224)
    at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestamps(ClientDistributedCacheManager.java:93)
    at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestampsAndCacheVisibilities(ClientDistributedCacheManager.java:57)
    at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:264)
    at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:300)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:387)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1268)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1265)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1265)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1286)
    at hmrDemo.HBaseWithMapWithoutReduce.main(HBaseWithMapWithoutReduce.java:46)
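The trace shows the job submitter (JobSubmitter.copyAndConfigureFiles via ClientDistributedCacheManager) trying to look up a local Windows jar path (E:/workspace_hadoop/.../protobuf-java-2.5.0.jar) on HDFS, which DistributedFileSystem rejects as "not a valid DFS filename". Those paths are the client-side dependency jars that TableMapReduceUtil registers while setting up the job. A minimal sketch of one possible workaround, assuming the HBase and protobuf jars are already available on the cluster's task classpath, is to use the initTableMapperJob overload whose last argument disables adding dependency jars (the surrounding variables are the same as in the full classes below):

// Sketch only: the trailing "false" is addDependencyJars. With it disabled,
// the local E:/... client jars are no longer put on the distributed cache,
// so the submitter never tries to resolve them as DFS paths; the jars must
// then already exist on the cluster side.
TableMapReduceUtil.initTableMapperJob(
        tableName, scan, MyMapper.class,
        null, null,   // no output key/value classes (map-only job)
        job,
        false);       // addDependencyJars = false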
package hmrDemo;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class HBaseWithMapWithoutReduce {

    // Map-only job: scan an HBase table and print each row; nothing is emitted.
    private static class MyMapper extends TableMapper<Text, LongWritable> {
        @Override
        public void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            System.err.println(Text.decode(key.get()));
            System.err.println(value.toString());
            System.err.println(context.toString());
        }
    }

    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 2) {
            args = new String[] { "content", "Hello" };
        }
        String tableName = "wordcount";
        Configuration conf = HBaseConfiguration.create();
        conf.set("mapred.job.tracker", "JT host or IP:9001");
        conf.set("hbase.zookeeper.quorum", "ZK host or IP");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        Job job = new Job(conf, "ExampleRead");
        job.setJarByClass(HBaseWithMapWithoutReduce.class);
        Scan scan = new Scan();
        scan.setCaching(500);        // larger scanner caching for MapReduce scans
        scan.setCacheBlocks(false);  // don't fill the block cache with a full scan
        TableMapReduceUtil.initTableMapperJob(tableName, scan, MyMapper.class, null, null, job);
        job.setOutputFormatClass(NullOutputFormat.class);
        boolean b = job.waitForCompletion(true);
        if (!b) {
            throw new IOException("error with job!");
        }
    }
}
package hmrDemo;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class HBaseWithMapWithoutReduce {

    private static class MyMapper extends TableMapper<Text, LongWritable> {
        @Override
        public void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            System.err.println(Text.decode(key.get()));
            System.err.println(value.toString());
            System.err.println(context.toString());
        }
    }

    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 2) {
            args = new String[] { "content", "Hello" };
        }
        String tableName = "wordcount";
        // No explicit conf.set(...) calls here: cluster and ZooKeeper settings
        // must come from the *-site.xml files on the client classpath.
        Configuration config = HBaseConfiguration.create();
        Job job = Job.getInstance(config, "ExampleRead");
        job.setJarByClass(HBaseWithMapWithoutReduce.class);
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);
        TableMapReduceUtil.initTableMapperJob(tableName, scan, MyMapper.class, null, null, job);
        job.setOutputFormatClass(NullOutputFormat.class);
        // Iterator<Entry<String, String>> iterator = config.iterator();
        // while (iterator.hasNext()) {
        //     Entry<String, String> entry = iterator.next();
        //     System.err.println(entry.getKey() + " -> " + entry.getValue());
        // }
        boolean b = job.waitForCompletion(true);
        if (!b) {
            throw new IOException("error with job!");
        }
    }
}
This is the code I typed in by following the demo in the documentation.
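For reference, the local jar paths in the error come from the "tmpjars" property that TableMapReduceUtil fills in when it adds dependency jars during initTableMapperJob; the job submitter then tries to resolve every entry against the DFS. A small debugging sketch (one line that could be added just before job.waitForCompletion in either version) makes the submitted paths visible:

// Debugging aid: dump the dependency jars the submitter will try to resolve.
// On a Windows client these show up as E:/... paths, which is exactly what
// DistributedFileSystem then rejects as "not a valid DFS filename".
System.err.println("tmpjars = " + job.getConfiguration().get("tmpjars"));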