Sqoop2 提交 job 到 YARN 后结果为 FAILED，附完整错误日志

HeroismDayLess 2018-01-29 11:34:09
2018-01-29 11:14:27,194 FATAL [IPC Server handler 19 on 44394] org.apache.hadoop.mapred.TaskAttemptListenerImpl: Task: attempt_1517195320691_0001_m_000004_0 - exited : org.apache.sqoop.common.SqoopException: MAPRED_EXEC_0017:Error occurs during extractor run
at org.apache.sqoop.job.mr.SqoopMapper.runInternal(SqoopMapper.java:115)
at org.apache.sqoop.job.mr.SqoopMapper.access$000(SqoopMapper.java:49)
at org.apache.sqoop.job.mr.SqoopMapper$1.call(SqoopMapper.java:74)
at org.apache.sqoop.job.mr.SqoopMapper$1.call(SqoopMapper.java:71)
at org.apache.sqoop.utils.ClassUtils.executeWithClassLoader(ClassUtils.java:281)
at org.apache.sqoop.job.mr.SqoopMapper.run(SqoopMapper.java:70)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1746)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: org.apache.sqoop.common.SqoopException: GENERIC_HDFS_CONNECTOR_0001:Error occurs during extractor run
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extract(HdfsExtractor.java:94)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extract(HdfsExtractor.java:63)
at org.apache.sqoop.job.mr.SqoopMapper.runInternal(SqoopMapper.java:111)
... 12 more
Caused by: org.apache.sqoop.common.SqoopException: MAPRED_EXEC_0013:Cannot write to the data writer
at org.apache.sqoop.job.mr.SqoopMapper$SqoopMapDataWriter.writeContent(SqoopMapper.java:169)
at org.apache.sqoop.job.mr.SqoopMapper$SqoopMapDataWriter.writeStringRecord(SqoopMapper.java:148)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extractRow(HdfsExtractor.java:293)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extractTextFile(HdfsExtractor.java:197)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extractFile(HdfsExtractor.java:112)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.access$300(HdfsExtractor.java:63)
at org.apache.sqoop.connector.hdfs.HdfsExtractor$1.run(HdfsExtractor.java:88)
at org.apache.sqoop.connector.hdfs.HdfsExtractor$1.run(HdfsExtractor.java:79)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1746)
at org.apache.sqoop.connector.hdfs.HdfsExtractor.extract(HdfsExtractor.java:79)
... 14 more
Caused by: org.apache.sqoop.common.SqoopException: MAPRED_EXEC_0018:Error occurs during loader run
at org.apache.sqoop.job.mr.SqoopOutputFormatLoadExecutor$ConsumerThread.run(SqoopOutputFormatLoadExecutor.java:292)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.sqoop.common.SqoopException: GENERIC_JDBC_CONNECTOR_0001:Unable to get a connection
at org.apache.sqoop.connector.jdbc.GenericJdbcExecutor.<init>(GenericJdbcExecutor.java:121)
at org.apache.sqoop.connector.jdbc.GenericJdbcLoader.load(GenericJdbcLoader.java:36)
at org.apache.sqoop.connector.jdbc.GenericJdbcLoader.load(GenericJdbcLoader.java:25)
at org.apache.sqoop.job.mr.SqoopOutputFormatLoadExecutor$ConsumerThread$1.call(SqoopOutputFormatLoadExecutor.java:279)
at org.apache.sqoop.job.mr.SqoopOutputFormatLoadExecutor$ConsumerThread$1.call(SqoopOutputFormatLoadExecutor.java:260)
at org.apache.sqoop.utils.ClassUtils.executeWithClassLoader(ClassUtils.java:281)
at org.apache.sqoop.job.mr.SqoopOutputFormatLoadExecutor$ConsumerThread.run(SqoopOutputFormatLoadExecutor.java:259)
... 5 more
Caused by: java.sql.SQLRecoverableException: IO 错误: Connection reset
at oracle.jdbc.driver.T4CConnection.logon(T4CConnection.java:421)
at oracle.jdbc.driver.PhysicalConnection.<init>(PhysicalConnection.java:531)
at oracle.jdbc.driver.T4CConnection.<init>(T4CConnection.java:221)
at oracle.jdbc.driver.T4CDriverExtension.getConnection(T4CDriverExtension.java:32)
at oracle.jdbc.driver.OracleDriver.connect(OracleDriver.java:503)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at org.apache.sqoop.connector.jdbc.GenericJdbcExecutor.<init>(GenericJdbcExecutor.java:118)
... 11 more
Caused by: java.net.SocketException: Connection reset
at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:115)
at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
at oracle.net.ns.DataPacket.send(DataPacket.java:199)
at oracle.net.ns.NetOutputStream.flush(NetOutputStream.java:211)
at oracle.net.ns.NetInputStream.getNextPacket(NetInputStream.java:227)
at oracle.net.ns.NetInputStream.read(NetInputStream.java:175)
at oracle.net.ns.NetInputStream.read(NetInputStream.java:100)
at oracle.net.ns.NetInputStream.read(NetInputStream.java:85)
at oracle.jdbc.driver.T4CSocketInputStreamWrapper.readNextPacket(T4CSocketInputStreamWrapper.java:122)
at oracle.jdbc.driver.T4CSocketInputStreamWrapper.read(T4CSocketInputStreamWrapper.java:78)
at oracle.jdbc.driver.T4CMAREngine.unmarshalUB1(T4CMAREngine.java:1179)
at oracle.jdbc.driver.T4CMAREngine.unmarshalSB1(T4CMAREngine.java:1155)
at oracle.jdbc.driver.T4CTTIfun.receive(T4CTTIfun.java:279)
at oracle.jdbc.driver.T4CTTIfun.doRPC(T4CTTIfun.java:186)
at oracle.jdbc.driver.T4CTTIoauthenticate.doOAUTH(T4CTTIoauthenticate.java:366)
at oracle.jdbc.driver.T4CTTIoauthenticate.doOAUTH(T4CTTIoauthenticate.java:752)
at oracle.jdbc.driver.T4CConnection.logon(T4CConnection.java:359)
... 18 more
...全文
930 1 打赏 收藏 转发到动态 举报
写回复
用AI写文章
1 条回复
切换为时间正序
请发表友善的回复…
发表回复
HeroismDayLess 2018-01-29
  • 打赏
  • 举报
回复
各位大牛，帮忙看下这是什么原因？这个 job 任务是将 HDFS 上的数据导入到 Oracle 表中。我试了几次都没成功，最好的一次 10 万条数据也只成功导入了 3 万条，之后任务就失败终止了。从日志看，根本原因是 loader 端建立 Oracle JDBC 连接时在 logon 阶段被重置（java.sql.SQLRecoverableException: IO 错误: Connection reset）。任何帮助我都感激不尽！

20,807

社区成员

发帖
与我相关
我的任务
社区描述
Hadoop生态大数据交流社区,致力于有Hadoop,hive,Spark,Hbase,Flink,ClickHouse,Kafka,数据仓库,大数据集群运维技术分享和交流等。致力于收集优质的博客
社区管理员
  • 分布式计算/Hadoop社区
  • 涤生大数据
加入社区
  • 近7日
  • 近30日
  • 至今
社区公告
暂无公告

试试用AI创作助手写篇文章吧