1. pom.xml configuration

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>2.1.1</version>
</dependency>

2. Driver class

org.apache.hive.jdbc.HiveDriver

3. Load the driver the traditional way with Class.forName, then obtain a connection from DriverManager.
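As a minimal sketch of that classic pattern against an unsecured HiveServer2 (the host, port, user, and password below are placeholders, not values from a real cluster):

package org.example;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Minimal sketch: the classic Class.forName + DriverManager pattern.
// Host, port, user, and password are placeholders for an unsecured HiveServer2.
public class SimpleHiveJdbc {
    public static void main(String[] args) throws Exception {
        // Load the Hive JDBC driver the traditional way
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive2://localhost:10000/default", "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

On a Kerberos-secured cluster, a few more dependencies and a keytab login are needed; the detailed steps follow.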
1. pom.xml configuration

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>2.1.1</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.3</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.7.7</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.7.7</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.7.3</version>
</dependency>
2. Prepare the configuration files

When developing Spark programs in IDEA, you need to simulate a Hadoop environment locally; otherwise every debugging round requires packaging a jar and running it on the cluster, which seriously hurts development efficiency.

winutils.exe is the Hadoop helper needed on Windows: it bundles the basic native utilities required to debug Hadoop and Spark programs on a Windows system.

Downloading and configuring winutils.exe is covered in "Using winutils.exe to debug Hadoop and Spark programs in a Windows development environment" (CSDN blog).
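As a minimal sketch (the install path is an assumption; it must be the directory whose bin\ subdirectory contains winutils.exe), the property only needs to be set before the first Hadoop class is initialized:

// Minimal sketch: make winutils.exe discoverable for local debugging on Windows.
// The path "D:\\work\\hadoop-3.0.0" is an assumption for illustration.
public class WinutilsSetup {
    public static void main(String[] args) {
        // Must point at the folder whose bin\ subdirectory holds winutils.exe,
        // and must be set before any Hadoop class is loaded.
        System.setProperty("hadoop.home.dir", "D:\\work\\hadoop-3.0.0");
        // Setting the HADOOP_HOME environment variable to the same directory also works.
        System.out.println("hadoop.home.dir = " + System.getProperty("hadoop.home.dir"));
    }
}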
3. Example
package org.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class Main {

    private static String url = "jdbc:hive2://cdh-mgm01:10000/default;principal=hive/cdh-mgm01@FEHORIZON.COM";
    private static Connection conn = null;
    private static PreparedStatement ps = null;
    private static ResultSet rs = null;

    public static void main(String[] args) throws SQLException {
        String path = "D:\\work\\Kerberos5\\";
        // System.out.println(System.getProperty("user.dir") + "/krb5.conf");

        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");

        System.setProperty("krb5_ini", path + "krb5.conf");
        System.setProperty("hive_keytab", path + "hive.keytab");
        // Windows: directory that contains bin\winutils.exe
        System.setProperty("hadoop.home.dir", "p:\\work\\hadoop-3.0.0");
        System.setProperty("java.security.krb5.conf", System.getProperty("krb5_ini"));

        UserGroupInformation.setConfiguration(conf);
        try {
            UserGroupInformation.loginUserFromKeytab("hive/cdh-mgm01@FEHORIZON.COM", System.getProperty("hive_keytab"));
        } catch (IOException e) {
            e.printStackTrace();
        }

        try {
            // Connect as the hive principal (user name and password stay empty with Kerberos)
            conn = DriverManager.getConnection(url, "", "");
        } catch (SQLException e) {
            e.printStackTrace();
        }
        showTables();
    }

    public static void showTables() {
        try {
            // Switch to the default database
            ps = conn.prepareStatement("use default");
            ps.execute();
            // List all tables
            rs = ps.executeQuery("show tables");
            // Process the result set
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
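The example above leaves the connection, statement, and result set open. A minimal follow-up sketch of a parameterized query with try-with-resources (the table my_table, its dt column, and the date value are assumptions; the URL and the prior Kerberos login are reused from the example above):

package org.example;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Minimal sketch: run a parameterized query and close resources automatically.
// The table "my_table" and its "dt" column are assumptions for illustration.
public class QuerySketch {

    private static final String URL =
            "jdbc:hive2://cdh-mgm01:10000/default;principal=hive/cdh-mgm01@FEHORIZON.COM";

    public static void main(String[] args) throws SQLException {
        // The Kerberos login (krb5.conf, keytab, UserGroupInformation) is assumed
        // to have been performed already, exactly as in the example above.
        try (Connection conn = DriverManager.getConnection(URL, "", "");
             PreparedStatement ps = conn.prepareStatement(
                     "SELECT * FROM my_table WHERE dt = ? LIMIT 10")) {
            ps.setString(1, "2023-01-01");
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    // Print the first column of each row
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}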
4. Screenshot of a successful run