A table created in Hive, dropped with SparkSQL: an error is reported, but the table is deleted anyway

weixin_38067167 2017-05-10 05:30:29
The error output is below. This is on the Huawei big data platform. Tables created through SparkSQL drop without any problem. Tables created through Hive are also deleted, but the drop reports the following error. Could someone explain why?

17/05/10 17:19:14 ERROR DDLTask: org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
    at org.apache.hadoop.hbase.client.HBaseAdmin.checkTableExistence(HBaseAdmin.java:1302)
    at org.apache.hadoop.hbase.client.HBaseAdmin.isTableEnabled(HBaseAdmin.java:1313)
    at org.apache.hadoop.hbase.client.HBaseAdmin.isTableEnabled(HBaseAdmin.java:1322)
    at org.apache.hadoop.hive.hbase.HBaseStorageHandler.commitDropTable(HBaseStorageHandler.java:166)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1092)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1026)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
    at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2107)
    at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1039)
    at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:976)
    at org.apache.hadoop.hive.ql.exec.DDLTask.dropTable(DDLTask.java:3732)
    at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3588)
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:338)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:158)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1900)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1659)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1412)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1241)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1231)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:946)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:934)
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:337)
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:283)
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:282)
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:325)
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:934)
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:871)
    at org.apache.spark.sql.hive.InnerHiveContext.runSqlHive(InnerHiveContext.scala:815)
    at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:78)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:151)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:149)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:149)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:1253)
    at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:1253)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:1060)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:68)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:329)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:377)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:232)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:764)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala))
    at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1045)
    [... the same Hive.dropTable -> DDLTask -> Driver -> ClientWrapper -> SparkSubmit frames as above ...]
Caused by: MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
    [... the same inner stack trace as above ...])
    at org.apache.hadoop.hive.hbase.HBaseStorageHandler.commitDropTable(HBaseStorageHandler.java:172)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1092)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1026)
    [... the same reflection and proxy frames as above ...]
    at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1039)
    ... 48 more
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr [... same stack trace as above ...])
17/05/10 17:19:14 ERROR Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr [... same stack trace as above ...])
17/05/10 17:19:14 ERROR ClientWrapper: ====================== HIVE FAILURE OUTPUT ======================
SET spark.sql.bigdata.register.preExecutionRule=org.apache.spark.sql.hbase.execution.AddCoprocessor$,org.apache.spark.sql.execution.EnsureRowFormats$
SET spark.sql.ui.retainedExecutions=1000
SET spark.sql.sources.maxConcurrentWrites=5
SET spark.sql.autoBroadcastJoinThreshold=10485760
SET spark.sql.parquet.int96AsTimestamp=true
SET spark.sql.parquet.cacheMetadata=true
SET spark.sql.authorization.enabled=true
SET spark.sql.bigdata.register.dialect=org.apache.spark.sql.hbase.HBaseSQLParser,org.apache.spark.sql.CarbonInternalSqlParser
SET spark.sql.bigdata.register.strategyRule=org.apache.spark.sql.hbase.DummySparkPlanner,org.apache.spark.sql.hive.CarbonStrategy,org.apache.spark.sql.hive.CarbonDDLStrategy
SET spark.sql.sources.parallelPartitionDiscovery.threshold=32
SET spark.sql.inMemoryColumnarStorage.partitionPruning=true
SET spark.sql.bigdata.register.extendedResolutionRule=org.apache.spark.sql.hbase.catalyst.analysis.ReplaceOutput$
SET spark.sql.bigdata.register.analyseRule=org.apache.spark.sql.hive.acl.CarbonAccessControlRules
SET spark.sql.tungsten.enabled=true
SET spark.sql.parquet.binaryAsString=false
SET spark.sql.inMemoryColumnarStorage.batchSize=10000
SET spark.sql.bigdata.initFunction=org.apache.spark.sql.hbase.HBaseEnv,org.apache.spark.sql.CarbonEnv
SET spark.sql.parquet.compression.codec=gzip
SET spark.sql.parquet.filterPushdown=true
SET spark.sql.shuffle.partitions=200
SET spark.sql.caseSensitive=false
SET spark.sql.inMemoryColumnarStorage.compressed=true
SET spark.sql.dialect=org.apache.spark.sql.hive.huawei.BigSQLDialect
SET spark.sql.broadcastTimeout=300
SET hive.support.sql11.reserved.keywords=false
====================== END HIVE FAILURE OUTPUT ======================
17/05/10 17:19:14 ERROR SparkSQLDriver: Failed in [drop table adata.wang_solr]
org.apache.spark.sql.execution.QueryExecutionException: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr [... same stack trace as above ...])
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:951)
    [... the same ClientWrapper -> SQLContext.sql -> SparkSQLCLIDriver -> SparkSubmit frames as above ...]
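For reference: the failing frame is org.apache.hadoop.hive.hbase.HBaseStorageHandler.commitDropTable, which only runs for tables declared with the HBase storage handler, so the Hive-created table was presumably defined along these lines (a minimal sketch; the column list and mapping are assumptions, not the poster's actual DDL):

-- Hypothetical Hive-on-HBase DDL; the columns and hbase.columns.mapping
-- are assumed for illustration only.
CREATE TABLE adata.wang_solr (
  rowkey STRING,
  val    STRING
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:val')
TBLPROPERTIES ('hbase.table.name' = 'wang_solr');

-- The statement that produced the log above:
DROP TABLE adata.wang_solr;

Dropping such a table makes Hive call HBaseAdmin.isTableEnabled on the underlying HBase table, exactly as the stack trace shows. That would also explain why the table disappears despite the error: HiveMetaStoreClient.dropTable removes the metastore entry first and only afterwards invokes the storage handler's commitDropTable hook, so the metadata drop succeeds while the follow-up HBase cleanup fails with TableNotFoundException, suggesting the backing HBase table wang_solr had already been removed out-of-band.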