<?xml version="1.0" encoding="UTF-8"?>
<configuration>

  <!-- ===== Metastore: MySQL backing store connection ===== -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://hadoop103:3306/hive?createDatabaseIfNotExist=true</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>123456</value>
  </property>

  <!-- ===== Execution: scratch space and parallelism ===== -->
  <property>
    <name>hive.exec.scratchdir</name>
    <value>/my/opt/beh/metadata/hive/hivescratchdir/hive-${user.name}</value>
  </property>
  <property>
    <name>hive.exec.parallel</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.exec.parallel.thread.number</name>
    <value>16</value>
  </property>

  <!-- ===== CLI display options ===== -->
  <!-- Disabled: print column headers in CLI output.
  <property>
    <name>hive.cli.print.header</name>
    <value>true</value>
  </property>
  -->
  <property>
    <name>hive.cli.print.row.to.vertical</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
  </property>

  <!-- Disabled: automatic local-mode execution for small jobs.
  <property>
    <name>hive.exec.mode.local.auto</name>
    <value>true</value>
    <description>enable local mode</description>
  </property>
  -->

  <!-- ===== Joins ===== -->
  <property>
    <name>hive.auto.convert.join</name>
    <value>true</value>
    <description>automatically enable mapside join</description>
  </property>
  <property>
    <name>hive.mapjoin.smalltable.filesize</name>
    <value>250000000</value>
    <description>small table size (bytes)</description>
  </property>

  <!-- ===== Dynamic partitioning ===== -->
  <property>
    <name>hive.exec.dynamic.partition</name>
    <value>true</value>
    <description>enable dynamic partition</description>
  </property>
  <property>
    <name>hive.exec.dynamic.partition.mode</name>
    <value>strict</value>
    <description>whether or not allow all the partitions are dynamic</description>
  </property>
  <property>
    <name>hive.exec.max.dynamic.partitions.pernode</name>
    <value>1000</value>
    <description>number of partitions that one dynamic clause can create</description>
  </property>
  <property>
    <name>hive.mapred.mode</name>
    <value>strict</value>
  </property>

  <!-- ===== Hive Web Interface (HWI) ===== -->
  <property>
    <name>hive.hwi.listen.host</name>
    <value>hadoop101</value>
    <description>This is the host address the Hive Web Interface will listen on</description>
  </property>
  <property>
    <name>hive.hwi.listen.port</name>
    <value>19999</value>
    <description>This is the port the Hive Web Interface will listen on</description>
  </property>
  <property>
    <name>hive.hwi.war.file</name>
    <value>lib/hive-hwi-0.12.0.war</value>
    <description>This is the WAR file with the jsp content for Hive Web Interface</description>
  </property>

  <!-- ===== HiveServer2 (Thrift) ===== -->
  <property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <value>hadoop101</value>
  </property>

  <!-- ===== Warehouse location ===== -->
  <property>
    <name>hive.metastore.warehouse.dir</name>
    <value>/my/user/hive/warehouse</value>
    <description>location of default database for the warehouse</description>
  </property>

  <!-- ===== Compression and default storage format ===== -->
  <property>
    <name>hive.default.fileformat</name>
    <value>RCFile</value>
  </property>
  <property>
    <name>hive.exec.compress.output</name>
    <value>true</value>
    <description> This controls whether the final outputs of a query (to a local/HDFS
file or a Hive table) is compressed. The compression codec and other options
are determined from Hadoop config variables mapred.output.compress*
</description>
  </property>
  <property>
    <name>hive.files.umask.value</name>
    <value>0002</value>
  </property>

  <!-- ===== Authorization ===== -->
  <property>
    <name>hive.metastore.authorization.storage.checks</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.metastore.execute.setugi</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.security.authorization.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.security.authorization.createtable.owner.grants</name>
    <value>ALL</value>
  </property>
  <property>
    <name>hive.security.authorization.task.factory</name>
    <value>org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl</value>
  </property>

  <!-- Custom semantic-analyzer hook used for authorization checks. -->
  <property>
    <name>hive.semantic.analyzer.hook</name>
    <value>com.***.hive.security.AuthHook</value>
  </property>
  <property>
    <name>hive.semantic.analyzer.hook.admin</name>
    <value>hadoop</value>
  </property>

</configuration>
hive (default)> set hive.semantic.analyzer.hook.admin ;
hive.semantic.analyzer.hook.admin=hadoop
hive (default)> grant select on database default to user test;
OK
Time taken: 1.093 seconds
hive (default)> show grant;
OK
admin ROLE ALL true 1515120168000 admin
default test USER SELECT false 1515120224000 hadoop
Time taken: 1.708 seconds, Fetched: 2 row(s)
hive (default)> CREATE SCHEMA userdb;
Authorization failed:No privilege 'Create' found for outputs { }. Use SHOW GRANT to get more details.
hive (default)> CREATE table test(a string);
OK
Time taken: 1.355 seconds
hive (default)> show databases;
OK
default
Time taken: 0.084 seconds, Fetched: 1 row(s)