Exception in thread "main" java.lang.NullPointerException
Run result:
log4j:WARN No appenders could be found for logger (org.apache.hadoop.util.Shell).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
null
Exception in thread "main" java.lang.NullPointerException
at java.lang.ProcessBuilder.start(Unknown Source)
at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
at org.apache.hadoop.util.Shell.run(Shell.java:455)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:702)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:774)
at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:572)
at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getOwner(RawLocalFileSystem.java:555)
at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:110)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:348)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1285)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1282)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1282)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1303)
at cn.music.TableMapperDemo.main(TableMapperDemo.java:85)
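
The job dies before it ever reaches the cluster: JobSubmissionFiles.getStagingDir is only checking permissions on the local staging directory, and the NullPointerException surfaces from ProcessBuilder.start inside org.apache.hadoop.util.Shell. On Hadoop 2.x this is the classic symptom of submitting from a Windows client without winutils.exe: with HADOOP_HOME / hadoop.home.dir unset, Shell resolves the winutils path to null, so the command array handed to ProcessBuilder contains a null element. A common workaround, sketched below on the assumption that the client really is Windows and that a Hadoop 2.7.x winutils.exe has been unpacked into C:\hadoop\bin (a placeholder path), is to set hadoop.home.dir as the very first thing in main():

// Hedged sketch: only relevant when submitting from a Windows client.
// "C:\\hadoop" is a placeholder directory that must contain bin\winutils.exe.
if (System.getProperty("os.name").toLowerCase().contains("windows")) {
    System.setProperty("hadoop.home.dir", "C:\\hadoop");
}

On a Linux client Shell never goes through winutils, so if the job is submitted from Linux this particular null-command NPE points elsewhere and the workaround above does not apply.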
------------------------------------------------------------------------------------------------------------------------------------
Code:
package cn.music;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class TableMapperDemo {

    // Map-only pass over the HBase table: each row arrives as (row key, Result);
    // one output line is written per cell, keyed by the cell's full key string.
    static class MyMapper extends TableMapper<Text, Text> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            List<Cell> cells = value.listCells();
            for (Cell cell : cells) {
                String outValue = String.format("RowKey:%s Family:%s Qualifier:%s cellValue:%s",
                        Bytes.toString(key.get()),
                        Bytes.toString(CellUtil.cloneFamily(cell)),
                        Bytes.toString(CellUtil.cloneQualifier(cell)),
                        Bytes.toString(CellUtil.cloneValue(cell)));
                context.write(new Text(CellUtil.getCellKeyAsString(cell)), new Text(outValue));
            }
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = HBaseConfiguration.create();
        // Applies generic options (-D, -libjars, -files ...) to conf as a side effect.
        new GenericOptionsParser(conf, args);
        // Prints "null" unless -libjars was passed on the command line:
        // tmpjars is only set programmatically further below.
        System.out.println(conf.get("tmpjars"));

        // ZooKeeper quorum used to locate the HBase cluster.
        conf.set("hbase.zookeeper.quorum",
                "Cluster-01:2181,Cluster-02:2181,Cluster-03:2181,Cluster-04:2181,Cluster-05:2181");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        // Source table and the columns the table scan should return.
        conf.set(TableInputFormat.INPUT_TABLE, "music");
        conf.set(TableInputFormat.SCAN_COLUMNS, "info:name info:gender");
        //conf.set(TableInputFormat.SCAN_ROW_START, "12_song12_2016-1-13");

        // Submit to YARN instead of running with the local job runner.
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.address", "Cluster-01:8032");
        conf.set("yarn.resourcemanager.scheduler.address", "Cluster-01:8030");
        conf.addResource("mapred-site.xml");
        conf.addResource("core-site.xml");
        conf.addResource("hdfs-site.xml");
        conf.addResource("yarn-site.xml");

        Job job = Job.getInstance(conf, "hbase-mapreduce-api");
        job.setJarByClass(TableMapperDemo.class);
        job.setInputFormatClass(TableInputFormat.class);
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Ship the HBase/Hadoop client jars to the cluster via the tmpjars property.
        List<String> libjars = new ArrayList<String>();
        libjars.add("file:/home/admin/hadoop/hadoop-2.7.3/share/hadoop/common/lib/zookeeper-3.4.6.jar");
        libjars.add("file:/home/admin/hadoop/hadoop-2.7.3/share/hadoop/common/hadoop-common-2.7.3.jar");
        libjars.add("file:/home/admin/hadoop/hadoop-2.7.3/share/hadoop/common/lib/guava-11.0.2.jar");
        libjars.add("file:/home/admin/hadoop/hadoop-2.7.3/share/hadoop/common/lib/protobuf-java-2.5.0.jar");
        libjars.add("file:/home/admin/hadoop/hadoop-2.7.3/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.3.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/hbase-server-1.2.3.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/hbase-hadoop-compat-1.2.3.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/hbase-client-1.2.3.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/metrics-core-2.2.0.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/netty-all-4.0.23.Final.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/htrace-core-3.1.0-incubating.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/hbase-common-1.2.3.jar");
        libjars.add("file:/home/admin/hbase/hbase-1.2.3/lib/hbase-protocol-1.2.3.jar");
        job.getConfiguration().set("tmpjars", StringUtils.join(libjars, ','));

        // Remove any previous output so FileOutputFormat does not reject the path.
        Path output = new Path("/output2/music3");
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(output)) {
            fs.delete(output, true);
        }
        FileOutputFormat.setOutputPath(job, output);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
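
For comparison, HBase ships a helper that wires up the same job without the hand-maintained jar list: TableMapReduceUtil.initTableMapperJob sets the input format, the scan, the mapper and the map output types, and by default also calls addDependencyJars, which puts the HBase/ZooKeeper client jars onto tmpjars from the submitting classpath automatically. A minimal sketch of the same setup, reusing the conf and MyMapper defined above (the extra imports are org.apache.hadoop.hbase.client.Scan and org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil):

// Same columns as the SCAN_COLUMNS string above, expressed on a Scan object.
Scan scan = new Scan();
scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"));

Job job = Job.getInstance(conf, "hbase-mapreduce-api");
job.setJarByClass(TableMapperDemo.class);
// Configures TableInputFormat, the scan, the mapper and the map output classes,
// and by default ships the HBase dependency jars with the job.
TableMapReduceUtil.initTableMapperJob("music", scan, MyMapper.class,
        Text.class, Text.class, job);
FileOutputFormat.setOutputPath(job, new Path("/output2/music3"));
System.exit(job.waitForCompletion(true) ? 0 : 1);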