The file exists, but the program cannot find it

lichao23jordan 2015-12-17 10:28:49
[root@chinadaas11 credit]# hadoop jar /tmp/lib/chinadaas-tdh.jar com.chinadaas.mr.IncrementalPut ..
2015-12-17 09:47:02 [ WARN] org.apache.hadoop.mapred.JobClient copyAndConfigureFiles main:755: - Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
2015-12-17 09:47:02 [ INFO] org.apache.hadoop.mapred.JobClient$2 run main:1032: - Cleaning up the staging area hdfs://chinadaas11:8020/mapreduce1/staging/root/.staging/job_201512141934_0029
2015-12-17 09:47:02 [ERROR] org.apache.hadoop.security.UserGroupInformation doAs main:1411: - PriviledgedActionException as:root (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /tmp/lib/hbase-0.94.11-transwarp.jar
Exception in thread "main" java.io.FileNotFoundException: File does not exist: /tmp/lib/hbase-0.94.11-transwarp.jar
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:824)
at org.apache.hadoop.filecache.DistributedCache.getFileStatus(DistributedCache.java:185)
at org.apache.hadoop.filecache.TrackerDistributedCacheManager.getFileStatus(TrackerDistributedCacheManager.java:746)
at org.apache.hadoop.filecache.TrackerDistributedCacheManager.determineTimestamps(TrackerDistributedCacheManager.java:815)
at org.apache.hadoop.filecache.TrackerDistributedCacheManager.determineTimestampsAndCacheVisibilities(TrackerDistributedCacheManager.java:778)
at org.apache.hadoop.mapred.JobClient.copyAndConfigureFiles(JobClient.java:853)
at org.apache.hadoop.mapred.JobClient.copyAndConfigureFiles(JobClient.java:743)
at org.apache.hadoop.mapred.JobClient.access$400(JobClient.java:174)
at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:961)
at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:946)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1408)
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:946)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:566)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:596)
at com.chinadaas.mr.IncrementalPut.run(IncrementalPut.java:223)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
at com.chinadaas.mr.IncrementalPut.main(IncrementalPut.java:233)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:208)
[root@chinadaas11 credit]# ll /tmp/lib
total 6612
-rw-r--r-- 1 root root 66589 Nov 27 15:34 chinadaas-tdh.jar
-rw-r--r-- 1 root root 5385292 Nov 27 15:34 hbase-0.94.11-transwarp.jar
-rw-r--r-- 1 root root 1311975 Nov 27 15:34 zookeeper-3.4.5-transwarp.jar
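The exception is thrown from org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus, so the job client is looking for /tmp/lib/hbase-0.94.11-transwarp.jar on HDFS, not on the local disk that the ll output above lists. A minimal sketch to confirm where the path actually resolves (the CheckPath class name is illustrative and not part of the original post):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckPath {
    public static void main(String[] args) throws Exception {
        // Picks up fs.defaultFS (hdfs://chinadaas11:8020 in the log) from core-site.xml on the classpath
        Configuration conf = new Configuration();
        Path jar = new Path("/tmp/lib/hbase-0.94.11-transwarp.jar");

        FileSystem hdfs = FileSystem.get(conf);       // the filesystem the JobClient queries
        FileSystem local = FileSystem.getLocal(conf); // the local Linux filesystem listed by ll

        System.out.println(hdfs.getUri() + " has it: " + hdfs.exists(jar));
        System.out.println("local fs has it: " + local.exists(jar));
    }
}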
2 replies
DayDayUp丶 2019-06-12
The cause of the OP's problem is insufficient Linux user permissions. I had the same problem; see https://blog.csdn.net/songzehao/article/details/91560692 for a fix.
上帝的纹身 2015-12-17
package com.chinadaas.mr;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.PropertyConfigurator;

import com.chinadaas.conf.ConfigManager;
import com.chinadaas.hbase.data.DisBzxr;

public class IncrementalPut extends Configured implements Tool {

    public static final Log LOG = LogFactory.getLog(IncrementalPut.class);

    public static final String BASE_DIR = "base.dir";
    public static final String INDEX_NAME_POSTFIX = "_name";
    public static final String INDEX_CARD_POSTFIX = "_card";
    public static final String MAPRED_INPUT_PATH = "mapred.input.path";
    public static final String MAPRED_OUTPUT_PATH = "mapred.output.path";
    public static final String MAPRED_DC_FILE = "mapred.dc.file";
    public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";
    public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";
    public static final String HBASE_WRITE_ENABLE = "hbase.write.enable";
    public static final String MAPRED_REDUCE_TASKS = "mapred.reduce.tasks";
    public static final String BZXR_TABLE_NAME = "bzxr.table.name";

    public IncrementalPut(String base) {
        PropertyConfigurator.configure(base + "/cfg/log4j.properties");
        ConfigManager cm = new ConfigManager(base + "/cfg/chinadaas.xml");
        Configuration conf = new Configuration();
        conf.set(MAPRED_INPUT_PATH, cm.getString(MAPRED_INPUT_PATH));
        conf.set(MAPRED_OUTPUT_PATH, cm.getString(MAPRED_OUTPUT_PATH));
        conf.set(MAPRED_DC_FILE, cm.getString(MAPRED_DC_FILE));
        conf.set(HBASE_ZOOKEEPER_QUORUM, cm.getString(HBASE_ZOOKEEPER_QUORUM));
        conf.set(ZOOKEEPER_ZNODE_PARENT, cm.getString(ZOOKEEPER_ZNODE_PARENT));
        conf.setBoolean(HBASE_WRITE_ENABLE, cm.getBoolean(HBASE_WRITE_ENABLE, false));
        conf.set(MAPRED_REDUCE_TASKS, cm.getString(MAPRED_REDUCE_TASKS));
        conf.set(BZXR_TABLE_NAME, cm.getString(BZXR_TABLE_NAME));
        setConf(conf);
    }

    public static class M extends Mapper<LongWritable, Text, Text, Text> {

        private Text mapKey = new Text();
        private Text mapValue = new Text();
        private HConnection connection;
        private HTableInterface primary;
        private HTableInterface indexName;
        private HTableInterface indexCard;
        private boolean hbaseEnable = false;
        private Map<String, String> areaCode = new HashMap<String, String>();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            hbaseEnable = conf.getBoolean(HBASE_WRITE_ENABLE, false);
            System.out.println(HBASE_WRITE_ENABLE + ": " + hbaseEnable);
            if (hbaseEnable) {
                String tableName = conf.get(BZXR_TABLE_NAME);
                connection = HConnectionManager.createConnection(conf);
                primary = connection.getTable(Bytes.toBytes(tableName));
                indexName = connection.getTable(Bytes.toBytes(tableName + INDEX_NAME_POSTFIX));
                indexCard = connection.getTable(Bytes.toBytes(tableName + INDEX_CARD_POSTFIX));
            }
            Path[] dcFiles = new Path[0];
            try {
                dcFiles = DistributedCache.getLocalCacheFiles(context.getConfiguration());
                for (Path dcFile : dcFiles) {
                    if (!dcFile.toString().endsWith(".jar")) {
                        parseFile(dcFile);
                    }
                }
            } catch (IOException ioe) {
            }
        }

        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String record = value.toString();
            DisBzxr rec = new DisBzxr();
            rec.parse(record);
            rec.processing(areaCode);
            String rowKey = rec.getId();
            String name = rec.getInameClean();
            String card = rec.getInameClean();
            if (hbaseEnable) {
                // Write to the primary table
                Put p = new Put(Bytes.toBytes(rowKey));
                p.add(Bytes.toBytes("f"), Bytes.toBytes("A"),
                        Bytes.toBytes(rec.toString("\u0001")));
                primary.put(p);
                primary.flushCommits();
                // Write to the name index table
                if (!name.isEmpty()) {
                    p = new Put(Bytes.toBytes(name));
                    p.add(Bytes.toBytes("f"), Bytes.toBytes(rowKey), Bytes.toBytes(""));
                    indexName.put(p);
                    indexName.flushCommits();
                }
                // Write to the ID-card index table
                card = rec.getCardnumClean();
                if (!card.isEmpty()) {
                    p = new Put(Bytes.toBytes(card));
                    p.add(Bytes.toBytes("f"), Bytes.toBytes(rowKey), Bytes.toBytes(""));
                    indexCard.put(p);
                    indexCard.flushCommits();
                }
            } else {
                mapKey.set(rec.getId());
                mapValue.set(rec.toString("\t"));
                context.write(mapKey, mapValue);
            }
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            if (hbaseEnable) {
                primary.close();
                indexName.close();
                indexCard.close();
                connection.close();
            }
        }

        private void parseFile(Path dcFile) {
            try {
                BufferedReader bf = new BufferedReader(new FileReader(dcFile.toString()));
                String line;
                String[] subs;
                while ((line = bf.readLine()) != null) {
                    subs = line.split("\u0001");
                    if (subs.length > 2 && !subs[2].equals("null")) {
                        areaCode.put(subs[1], subs[2]);
                    }
                }
                bf.close();
            } catch (IOException ioe) {
            }
        }
    }

    public static class R extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            for (Text value : values) {
                context.write(key, value);
            }
        }
    }

    //@Override
    public int run(String[] arg0) throws Exception {
        Configuration conf = getConf();
        Job job = new Job(conf, this.getClass().getSimpleName());
        job.setJarByClass(IncrementalPut.class);
        job.setMapperClass(M.class);
        job.setReducerClass(R.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        Path in = new Path(conf.get(MAPRED_INPUT_PATH));
        FileInputFormat.addInputPath(job, in);
        Path out = new Path(conf.get(MAPRED_OUTPUT_PATH));
        FileOutputFormat.setOutputPath(job, out);
        FileSystem fs = FileSystem.get(conf);
        fs.delete(out, true);

        DistributedCache.addFileToClassPath(new Path("/tmp/lib/hbase-0.94.11-transwarp.jar"),
                job.getConfiguration());
        DistributedCache.addFileToClassPath(new Path("/tmp/lib/zookeeper-3.4.5-transwarp.jar"),
                job.getConfiguration());
        String dcFile = conf.get(MAPRED_DC_FILE);
        DistributedCache.addCacheFile(new Path(dcFile).toUri(), job.getConfiguration());

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 1) {
            System.out.println("Please input base dir");
            System.exit(0);
        }
        String base = args[0];
        int res = ToolRunner.run(new IncrementalPut(base), args);
        System.exit(res);
    }
}
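In the run() method above, the two addFileToClassPath calls pass /tmp/lib/... paths, and DistributedCache resolves them against the job's default filesystem (hdfs://chinadaas11:8020 in the log), which is why the JobClient reports FileNotFoundException even though the jars exist on the local disk. A sketch of one way around this, assuming it is acceptable to upload the jars to HDFS first (the ClasspathJars helper and the HDFS target paths are illustrative, not from the thread):

import java.io.IOException;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

public final class ClasspathJars {
    private ClasspathJars() {}

    // Ensure the jar exists on the job's default filesystem (HDFS here),
    // uploading it from the local path if needed, before putting it on the
    // task classpath via DistributedCache.
    public static void addJar(Job job, String localPath, String hdfsPath) throws IOException {
        FileSystem fs = FileSystem.get(job.getConfiguration());
        Path remote = new Path(hdfsPath);
        if (!fs.exists(remote)) {
            // copy from the local Linux filesystem shown by `ll /tmp/lib`
            fs.copyFromLocalFile(new Path(localPath), remote);
        }
        DistributedCache.addFileToClassPath(remote, job.getConfiguration());
    }
}

run() could then call ClasspathJars.addJar(job, "/tmp/lib/hbase-0.94.11-transwarp.jar", "/tmp/lib/hbase-0.94.11-transwarp.jar") for each jar; uploading the jars once by hand with hadoop fs -put and leaving the code unchanged would have the same effect.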
