# Download the MySQL 5.7 yum repository RPM
wget https://dev.mysql.com/get/mysql57-community-release-el7-9.noarch.rpm
# Install the repository package
rpm -ivh mysql57-community-release-el7-9.noarch.rpm
cd /etc/yum.repos.d/
yum -y install mysql-server
# If the install fails on a GPG key check, import the updated MySQL key and then install mysql-server again
rpm --import https://repo.mysql.com/RPM-GPG-KEY-mysql-2022
yum install mysql-server
# Start MySQL
systemctl start mysqld
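Optionally, enable MySQL to start at boot and confirm the service came up before continuing:
# Enable at boot and check the service status (standard systemd commands)
systemctl enable mysqld
systemctl status mysqld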
# Retrieve the temporary root password generated on first start
grep 'temporary password' /var/log/mysqld.log
# The password after -p is the temporary password obtained in the previous step
mysql -uroot -plqjT3cA%swE9
# Change the password of the MySQL root user
set password for root@localhost=password("Root@123");
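MySQL 5.7's validate_password plugin enforces a mixed-case/number/special-character policy by default, which 'Root@123' satisfies; if the new password is rejected as too weak, the policy can be relaxed and the statement retried (validate_password_policy and validate_password_length are the plugin's 5.7 variable names):
# Optional: relax the password policy before setting a simpler password
set global validate_password_policy=0;
set global validate_password_length=6;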
# Create the database for the Hive metastore
create database hive_db;
# Grant the hive user all privileges on every table in hive_db, with the password 'Hive@123'
grant all privileges on hive_db.* to hive@'%' identified by 'Hive@123';
# Flush privileges so the changes take effect immediately
flush privileges;
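As a sanity check before leaving the mysql client, list the privileges that were just granted:
# Verify the grant for the hive user
show grants for hive@'%';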
Download the Hive release you need from the official archive: http://archive.apache.org/dist/hive/
If the download is slow, a mirror site such as https://repo.huaweicloud.com/apache/hive/ also works.
After downloading, place the archive in the shared folder or upload it to the virtual machine with a file-transfer tool.
# Move the Hive archive from the shared folder to /opt/softwares
cp /mnt/hgfs/shares/apache-hive-2.3.7-bin.tar.gz /opt/softwares
# Extract to /opt/modules
tar -zxf /opt/softwares/apache-hive-2.3.7-bin.tar.gz -C /opt/modules
# Rename the directory to simplify later commands
cd /opt/modules/
mv apache-hive-2.3.7-bin/ hive-2.3.7
# Edit /etc/profile
sudo vi /etc/profile
# Add the Hive environment variables
export HIVE_HOME=/opt/modules/hive-2.3.7
export PATH=$PATH:$HIVE_HOME/bin
# Apply the new configuration
source /etc/profile
# Check the Hive version
hive --version
# Create hive-env.sh from its template
cd /opt/modules/hive-2.3.7/conf/
cp hive-env.sh.template hive-env.sh
# In hive-env.sh, locate the Hadoop path section and add
export HADOOP_HOME=/opt/modules/hadoop-2.8.2
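For reference, the relevant lines of hive-env.sh might end up looking like this (HIVE_CONF_DIR is an optional extra, assuming the paths used throughout this guide):
# Hadoop installation and Hive configuration directories
export HADOOP_HOME=/opt/modules/hadoop-2.8.2
export HIVE_CONF_DIR=/opt/modules/hive-2.3.7/conf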
# Create hive-site.xml from the default template
cp hive-default.xml.template hive-site.xml
vi hive-site.xml
# In vi command mode, type /name to search; each setting to change is identified by its <name> tag below - find it and edit the <value>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://192.168.168.101:3306/hive_db?createDatabaseIfNotExist=true&amp;useSSL=false</value>
  <description>JDBC connect string for a JDBC metastore.</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
  <description>Username to use against metastore database</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>Hive@123</value>
  <description>password to use against metastore database</description>
</property>
<property>
  <name>hive.metastore.uris</name>
  <value>thrift://192.168.168.101:9083</value>
  <description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
</property>
<property>
  <name>hive.cli.print.current.db</name>
  <value>true</value>
  <description>Whether to include the current database in the Hive prompt.</description>
</property>
<property>
  <name>hive.cli.print.header</name>
  <value>true</value>
  <description>Whether to print the names of the columns in query output.</description>
</property>
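To double-check the edits without reopening the file, a quick grep of the changed keys works; each match prints the <name> line plus the <value> line that follows it:
# Show each javax.jdo.option setting together with its value
grep -A 1 'javax.jdo.option' hive-site.xml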
# Also copy the following and paste it into hive-site.xml
<property>
<name>system:java.io.tmpdir</name>
<value>/opt/modules/hive-2.3.7/tmp</value>
</property>
<property>
<name>system:user.name</name>
<value>hive</value>
</property>
<property>
<name>hive.metastore.local</name>
<value>false</value>
</property>
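Since system:java.io.tmpdir above points at a local directory, make sure it exists before starting Hive:
# Create the local scratch directory referenced in hive-site.xml
mkdir -p /opt/modules/hive-2.3.7/tmp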
# If Hive will be used together with HBase, also modify the following
<property>
  <name>hive.zookeeper.quorum</name>
  <value>hadoop01:2181,hadoop02:2181,hadoop03:2181</value>
</property>
<property>
  <name>hive.aux.jars.path</name>
  <value>
    file:///opt/modules/hbase-2.4.1/lib/hbase-client-2.4.1.jar,
    file:///opt/modules/hbase-2.4.1/lib/hbase-common-2.4.1.jar,
    file:///opt/modules/hbase-2.4.1/lib/hbase-server-2.4.1.jar,
    file:///opt/modules/hbase-2.4.1/lib/hbase-hadoop2-compat-2.4.1.jar,
    file:///opt/modules/hbase-2.4.1/lib/netty-all-4.0.23.Final.jar,
    file:///opt/modules/hbase-2.4.1/lib/hbase-protocol-2.4.1.jar,
    file:///opt/modules/hbase-2.4.1/lib/zookeeper-3.5.7.jar
  </value>
</property>
# Copy the MySQL JDBC driver into Hive's lib directory
cp /mnt/hgfs/shares/mysql-connector-java-5.1.49.jar /opt/modules/hive-2.3.7/lib/
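A quick listing confirms the driver is in place:
# The connector jar should appear in the output
ls /opt/modules/hive-2.3.7/lib/ | grep mysql-connector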
# Start HDFS and YARN
start-dfs.sh
start-yarn.sh
# Create the Hive warehouse directory on HDFS and grant group write permission on it and /tmp
hdfs dfs -chmod g+w /tmp
hdfs dfs -mkdir -p /user/hive/warehouse
hdfs dfs -chmod g+w /user/hive/warehouse
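Listing the directories back verifies the permissions took effect; the -d flag shows the directory entries themselves:
# Both entries should show group write (drwxrwxr-x or similar)
hdfs dfs -ls -d /tmp /user/hive/warehouse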
# Initialize the metastore schema in the MySQL hive_db database
schematool -dbType mysql -initSchema
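If initialization succeeded, schematool can report the schema version it created:
# Print metastore schema information as a verification step
schematool -dbType mysql -info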
# Start the metastore service in the background
hive --service metastore &
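The metastore should now be listening on the port configured in hive.metastore.uris; a port check confirms it (netstat comes from the net-tools package on CentOS 7):
# Look for a LISTEN entry on port 9083
netstat -ntlp | grep 9083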
# Distribute the environment configuration and the Hive directory to the other nodes
sudo scp /etc/profile hadoop02:/etc/
sudo scp /etc/profile hadoop03:/etc/
scp -r hive-2.3.7 hadoop02:/opt/modules/
scp -r hive-2.3.7 hadoop03:/opt/modules/
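On hadoop02 and hadoop03 the copied profile still needs to be sourced once; assuming passwordless SSH between the nodes, a remote check can confirm the client works:
# Source the profile and print the Hive version on a worker node
ssh hadoop02 'source /etc/profile && hive --version'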