Cannot create a database after configuring Hive authorization, but creating tables still works (Hadoop)

爱小天天 2018-01-05 11:32:39
After configuring Hive authorization following a tutorial, I can no longer create databases (creating tables still works). My Hive configuration is as follows:

<?xml version="1.0" encoding="UTF-8"?>

<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://hadoop103:3306/hive?createDatabaseIfNotExist=true</value>
</property>

<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>

<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
</property>

<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>123456</value>
</property>

<property>
<name>hive.exec.scratchdir</name>
<value>/my/opt/beh/metadata/hive/hivescratchdir/hive-${user.name}</value>
</property>

<property>
<name>hive.exec.parallel</name>
<value>true</value>
</property>

<property>
<name>hive.exec.parallel.thread.number</name>
<value>16</value>
</property>
<!--
<property>
<name>hive.cli.print.header</name>
<value>true</value>
</property>
-->
<property>
<name>hive.cli.print.row.to.vertical</name>
<value>true</value>
</property>

<property>
<name>hive.cli.print.current.db</name>
<value>true</value>
</property>
<!--
<property>
<name>hive.exec.mode.local.auto</name>
<value>true</value>
<description>enable local mode</description>
</property>
-->
<property>
<name>hive.auto.convert.join</name>
<value>true</value>
<description>automatically enable mapside join</description>
</property>

<property>
<name>hive.mapjoin.smalltable.filesize</name>
<value>250000000</value>
<description>small table size (bytes)</description>
</property>

<property>
<name>hive.exec.dynamic.partition</name>
<value>true</value>
<description>enable dynamic partition</description>
</property>

<property>
<name>hive.exec.dynamic.partition.mode</name>
<value>strict</value>
<description>whether or not allow all the partitions are dynamic</description>
</property>

<property>
<name>hive.exec.max.dynamic.partitions.pernode</name>
<value>1000</value>
<description>number of partitions that one dynamic clause can create</description>
</property>

<property>
<name>hive.mapred.mode</name>
<value>strict</value>
</property>

<property>
<name>hive.hwi.listen.host</name>
<value>hadoop101</value>
<description>This is the host address the Hive Web Interface will listen on</description>
</property>

<property>
<name>hive.hwi.listen.port</name>
<value>19999</value>
<description>This is the port the Hive Web Interface will listen on</description>
</property>

<property>
<name>hive.hwi.war.file</name>
<value>lib/hive-hwi-0.12.0.war</value>
<description>This is the WAR file with the jsp content for Hive Web Interface</description>
</property>

<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>hive.server2.thrift.bind.host</name>
<value>hadoop101</value>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/my/user/hive/warehouse</value>
<description>location of default database for the warehouse</description>
</property>
<!--compress-->

<property>
<name>hive.default.fileformat</name>
<value>RCFile</value>
</property>
<property>
<name>hive.exec.compress.output</name>
<value>true</value>
<description> This controls whether the final outputs of a query (to a local/HDFS
file or a Hive table) is compressed. The compression codec and other options
are determined from Hadoop config variables mapred.output.compress*
</description>
</property>

<property>
<name>hive.files.umask.value</name>
<value>0002</value>
</property>
<property>
<name>hive.metastore.authorization.storage.checks</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.execute.setugi</name>
<value>true</value>
</property>

<property>
<name>hive.security.authorization.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.security.authorization.createtable.owner.grants</name>
<value>ALL</value>
</property>
<property>
<name>hive.security.authorization.task.factory</name>
<value>org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl</value>
</property>
<property>
<name>hive.semantic.analyzer.hook</name>
<value>com.***.hive.security.AuthHook</value>
</property>

<property>
<name>hive.semantic.analyzer.hook.admin</name>
<value>hadoop</value>
</property>
</configuration>
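With these settings, the legacy authorization model is switched on (hive.security.authorization.enabled=true), a custom semantic analyzer hook (com.***.hive.security.AuthHook) is installed, and hadoop is declared as that hook's admin user. A quick way to confirm which of these values the CLI has actually loaded is to echo them from the session; this is a minimal check, not part of the original post:

set hive.security.authorization.enabled;
set hive.security.authorization.createtable.owner.grants;
set hive.semantic.analyzer.hook.admin;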



hive (default)> set hive.semantic.analyzer.hook.admin ;
hive.semantic.analyzer.hook.admin=hadoop
hive (default)> grant select on database default to user test;
OK
Time taken: 1.093 seconds
hive (default)> show grant;
OK
admin ROLE ALL true 1515120168000 admin
default test USER SELECT false 1515120224000 hadoop
Time taken: 1.708 seconds, Fetched: 2 row(s)
hive (default)> CREATE SCHEMA userdb;
Authorization failed:No privilege 'Create' found for outputs { }. Use SHOW GRANT to get more details.
hive (default)> CREATE table test(a string);
OK
Time taken: 1.355 seconds
hive (default)> show databases;
OK
default
Time taken: 0.084 seconds, Fetched: 1 row(s)
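Under the legacy authorization model the two statements are checked against different objects: CREATE TABLE in the default database only needs privileges on that database (covered here by the owner grants and the SELECT grant shown above), while CREATE SCHEMA/DATABASE is checked at the global level, where no privilege has been granted, hence "No privilege 'Create' found for outputs { }". One possible workaround, sketched below and not taken from the reply that follows, is to issue a global-level grant to the user running the session, assumed here to be hadoop (the hook's admin user):

-- hedged sketch: grant CREATE at the global level (legacy authorization syntax)
-- the user name hadoop is assumed from hive.semantic.analyzer.hook.admin
grant create to user hadoop;
show grant;
create database userdb;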
1 reply
bujianl123 2019-03-18
I finally found a way to solve this.
(1) vi /etc/Hive/conf.cloudera.hive/hive-site.xml and add the following to the Hive configuration file:
<property>
<name>hive.users.in.admin.role</name>
<value>hdfs</value>
</property>
(2) Save the configuration file, distribute it to the cluster, and restart the cluster.
(3) Switch to the hdfs user, open the Hive CLI, and run:
grant all to user root;
to grant the root account all privileges.
(4) Then run create database test; and it finally succeeds.
(5) To revoke all of the privileges afterwards:
revoke all from user root;
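For completeness, a short verification sequence along these lines, assuming the hive.users.in.admin.role change has been distributed and the grant above has been issued as the hdfs user (a sketch, not output from the original reply):

-- run in the Hive CLI as root after the grant
show grant;
-- the database should now be created without the authorization error
create database test;
show databases;
-- remove the broad grant again once the database exists
revoke all from user root;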
