Hive安装配置

1 在conf目录下使用vim创建hive-site.xml

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <!-- JDBC URL of the MySQL database that backs the metastore; SSL disabled -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://bigdata1:3306/metastore?useSSL=false</value>
    </property>
    <!-- JDBC driver class (legacy MySQL Connector/J 5.x driver name) -->
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>
    <!-- Credentials for the metastore database -->
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>
    <!-- NOTE(review): plaintext root password in the config file — restrict file permissions in production -->
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>123456</value>
    </property>
    <!-- Default HDFS location for managed (internal) table data -->
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive/warehouse</value>
    </property>
    
    <!-- HiveServer2 Thrift endpoint that JDBC/Beeline clients connect to -->
    <property>
        <name>hive.server2.thrift.port</name>
        <value>10000</value>
    </property>
    <property>
        <name>hive.server2.thrift.bind.host</name>
        <value>bigdata1</value>
    </property>
    <!-- Remote metastore Thrift URI; a standalone metastore service must be running at this address -->
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://bigdata1:9083</value>
    </property>
    <!-- Disable auth on the metastore DB-notification events API (avoids auth errors in simple setups) -->
    <property>
        <name>hive.metastore.event.db.notification.api.auth</name>
        <value>false</value>
    </property>
    <!-- Skip the strict schema-version check between the Hive jars and the metastore DB -->
    <property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
    </property>
    <!-- Enable active/passive HA mode for HiveServer2 -->
    <property>
        <name>hive.server2.active.passive.ha.enable</name>
        <value>true</value>
    </property>
</configuration>

2 配置hive-log4j2.properties

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Log4j2 internal status level, configuration name, and package to scan for plugins
status = INFO
name = HiveLog4j2
packages = org.apache.hadoop.hive.ql.log

# list of properties
property.hive.log.level = INFO
# DRFA = the daily rolling file appender defined below
property.hive.root.logger = DRFA
#property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
# Fixed log directory instead of the per-user temp-dir default commented out above
property.hive.log.dir = /opt/module/hive-3.1.2/logs
property.hive.log.file = hive.log
property.hive.perflogger.log.level = INFO

# list of all appenders
appenders = console, DRFA

# console appender
appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n

# daily rolling file appender
appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
appender.DRFA.policies.type = Policies
# Roll over once per day, aligned to the day boundary (modulate = true)
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.strategy.type = DefaultRolloverStrategy
# Keep at most 30 rolled-over daily log files
appender.DRFA.strategy.max = 30

# list of all loggers
loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp

# Quiet noisy ZooKeeper connection classes down to WARN
logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
logger.NIOServerCnxn.level = WARN

logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
logger.ClientCnxnSocketNIO.level = WARN

# Metastore ORM layers (DataNucleus/JPOX) log only at ERROR
logger.DataNucleus.name = DataNucleus
logger.DataNucleus.level = ERROR

logger.Datastore.name = Datastore
logger.Datastore.level = ERROR

logger.JPOX.name = JPOX
logger.JPOX.level = ERROR

logger.AmazonAws.name=com.amazonaws
logger.AmazonAws.level = INFO

logger.ApacheHttp.name=org.apache.http
logger.ApacheHttp.level = INFO

# Query-phase performance logger; level is driven by the property defined at the top
logger.PerfLogger.name = org.apache.hadoop.hive.ql.log.PerfLogger
logger.PerfLogger.level = ${sys:hive.perflogger.log.level}

# root logger
rootLogger.level = ${sys:hive.log.level}
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}

3 删除lib目录下的guava-19.0.jar，并将Hadoop自带的guava-27.0-jre.jar复制到Hive的lib目录（两者版本冲突会导致Hive启动报错）

# Hive 3.1.2 ships guava-19.0.jar, which conflicts with Hadoop 3.1.3's guava-27.0-jre.jar
# (typically surfaces as a NoSuchMethodError at startup). Remove the old jar and copy
# Hadoop's newer one into Hive's lib directory.
# Use -f (not -rf: this is a single file, not a directory) and an absolute path so the
# command works regardless of the current working directory.
rm -f /opt/module/apache-hive-3.1.2-bin/lib/guava-19.0.jar

cp /opt/module/hadoop-3.1.3/share/hadoop/common/lib/guava-27.0-jre.jar /opt/module/apache-hive-3.1.2-bin/lib/

4 将mysql-connector-java-5.1.37.jar 复制到lib目录下

 cp /opt/software/mysql-connector-java-5.1.37.jar /opt/module/apache-hive-3.1.2-bin/lib/

5 通过schematool命令初始化metastore元数据库

 schematool -dbType mysql -initSchema -verbose

相关推荐

  1. 安装配置hive

    2024-04-03 18:50:04       22 阅读
  2. Hive安装配置

    2024-04-03 18:50:04       7 阅读
  3. Hive安装配置

    2024-04-03 18:50:04       4 阅读
  4. 头歌—Hive安装配置

    2024-04-03 18:50:04       27 阅读
  5. 安装Hive

    2024-04-03 18:50:04       24 阅读
  6. Hive安装配置、初始化元数据、启动

    2024-04-03 18:50:04       22 阅读

最近更新

  1. 【无标题】

    2024-04-03 18:50:04       0 阅读
  2. 冒烟测试(Smoke Testing)简介

    2024-04-03 18:50:04       0 阅读

热门阅读

  1. spark 编程案例

    2024-04-03 18:50:04       6 阅读
  2. Springboot3 集成knife4j(swagger)

    2024-04-03 18:50:04       6 阅读
  3. Chrome安装Vue插件vue-devtools的步骤

    2024-04-03 18:50:04       5 阅读
  4. 建立mysql测试数据

    2024-04-03 18:50:04       6 阅读
  5. postcss使用简明教程

    2024-04-03 18:50:04       5 阅读
  6. 泰坦尼克号幸存者预测

    2024-04-03 18:50:04       7 阅读
  7. 【Python 笔记1】字典

    2024-04-03 18:50:04       3 阅读
  8. Power Automate里的常用方法

    2024-04-03 18:50:04       5 阅读
  9. Kingbase简单存储过程

    2024-04-03 18:50:04       6 阅读