示例工程开发

最近更新时间: 2026-03-13 09:03:00

环境准备

开发环境准备

准备项说明
安装 JDK:JDK 8、JDK 11,推荐使用 KonaJDK,下载地址
安装和配置 IDE:按需选择,比如 IntelliJ IDEA 或 Eclipse,示例使用 IntelliJ IDEA
安装 Maven开发环境基础配置,负责构建 Java 应用程序
Maven 配置准备如果需要本地调试,需要参考 6.开发环境准备 配置 Maven settings.xml,推荐 Maven 3.6.3,下载地址

导入示例工程代码

以下以 IntelliJ IDEA 举例,将示例工程代码导入进行说明。
2.1 下载样例代码:https://g-necm8077.coding.net/public/tencentcloud-tbds-examples/tbds-examples/git/files/master

克隆或者直接下载master代码都可以

git clone https://g-necm8077.coding.net/public/tencentcloud-tbds-examples/tbds-examples

2.2 导入项目,然后选择JDK、MAVEN和settings文件。

样例代码

功能说明

本示例演示样例代码通过 JDBC 方式连接和访问 Hive 引擎。

POM 文件

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.tencent.tbds</groupId>
    <artifactId>HiveExample</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <hadoop.version>3.2.2-TBDS-5.3.1.3</hadoop.version>
        <hive.version>3.1.3-TBDS-5.3.1.3</hive.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-auth</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <!-- Exclude Hadoop's log4j2 bindings; this project ships its own 2.17.2 set below -->
                <exclusion>
                    <artifactId>log4j-1.2-api</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-slf4j-impl</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-common</artifactId>
            <version>${hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-shims</artifactId>
            <version>${hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>log4j-1.2-api</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-slf4j-impl</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
            <exclusions>
                <!-- hadoop/hbase come in transitively with conflicting versions; rely on the explicit deps above -->
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hbase</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <!-- avoid duplicate SLF4J bindings (log4j-slf4j-impl is used instead) -->
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.32</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.17.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>2.17.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.17.2</version>
        </dependency>

    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.3</version>
                <configuration>
                    <archive>
                        <manifest>
                            <mainClass>com.tencent.tbds.HiveExample</mainClass>
                        </manifest>
                    </archive>
                    <descriptorRefs>
                        <!-- build a fat jar so the example runs without a hand-assembled classpath -->
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assemble</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <!-- Java 8 matches the documented JDK 8 / JDK 11 requirement;
                         source/target 6 is no longer supported by modern JDKs -->
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

示例代码

支持 HS2 直连和 ZooKeeper(zk)方式连接。
支持 Simple 认证和 Kerberos 认证。

public class HiveExample {
    public static final org.slf4j.Logger logger = LoggerFactory.getLogger(HiveExample.class.getName());
    private static final String HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";

    private static final String ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME = "Client";
    private static final String ZOOKEEPER_SERVER_PRINCIPAL_KEY = "zookeeper.server.principal";
    private static String ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = null;

    private static Configuration CONF = null;
    private static String KRB5_FILE = null;
    private static String USER_NAME = null;
    private static String USER_KEYTAB_FILE = null;

    /* ZooKeeper node ip:port list */
    private static Boolean isZookeeper = false;
    private static String zkQuorum = null;
    private static String auth = null;
    private static String sasl_qop = null;
    private static String zooKeeperNamespace = null;
    private static String serviceDiscoveryMode = null;
    private static String principal = null;
    private static String AUTH_HOST_NAME = null;
    private static String host = null;
    private static String port = null;

    /**
     * Resolves the Kerberos realm-qualified host name used for authentication.
     * <p>
     * Prefers the {@code SERVER_REALM} system property; falls back to the realm
     * from the krb5 configuration; defaults to plain {@code "hadoop"} when no
     * realm can be determined.
     *
     * @return the resolved auth host name (also stored in {@code AUTH_HOST_NAME})
     */
    public static String getUserRealm() {
        String serverRealm = System.getProperty("SERVER_REALM");
        if (serverRealm == null || serverRealm.isEmpty()) {
            // No explicit realm configured; derive it from krb5.conf.
            serverRealm = KerberosUtil.getKrb5DomainRealm();
        }
        // NOTE: the original compared Strings with != "", which is a reference
        // comparison and treated an empty realm as valid. Use isEmpty() instead.
        if (serverRealm != null && !serverRealm.isEmpty()) {
            AUTH_HOST_NAME = "hadoop." + serverRealm.toLowerCase();
        } else {
            AUTH_HOST_NAME = "hadoop";
        }
        return AUTH_HOST_NAME;
    }

    /**
     * Loads connection settings from the client properties file (path taken from
     * the {@code config.file} system property, default {@code hiveclient.properties})
     * and prepares Kerberos-related system properties when auth is KERBEROS.
     *
     * @throws IOException if the properties file cannot be read
     */
    private static void init() throws IOException {
        CONF = new Configuration();

        Properties clientInfo = new Properties();
        String hiveclientProp = System.getProperty("config.file", "hiveclient.properties");
        // try-with-resources guarantees the stream is closed even on load failure
        try (InputStream fileInputStream = new FileInputStream(new File(hiveclientProp))) {
            clientInfo.load(fileInputStream);
        }

        zkQuorum = clientInfo.getProperty("zk.quorum");
        auth = clientInfo.getProperty("auth");
        sasl_qop = clientInfo.getProperty("sasl.qop");
        zooKeeperNamespace = clientInfo.getProperty("zooKeeperNamespace");
        serviceDiscoveryMode = clientInfo.getProperty("serviceDiscoveryMode");
        principal = clientInfo.getProperty("principal");
        host = clientInfo.getProperty("host");
        port = clientInfo.getProperty("port");
        isZookeeper = Boolean.valueOf(clientInfo.getProperty("isZookeeper"));
        // USER_NAME is the account created beforehand, e.g. a user named "user"
        USER_NAME = clientInfo.getProperty("user");

        if ("KERBEROS".equalsIgnoreCase(auth)) {
            // Client-side keytab and krb5 configuration paths
            USER_KEYTAB_FILE = clientInfo.getProperty("keytab");
            KRB5_FILE = System.getProperty("java.security.krb5.conf", "krb5.conf");
            System.setProperty("java.security.krb5.conf", KRB5_FILE);
            ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL = "zookeeper/" + getUserRealm();
            System.setProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY, ZOOKEEPER_DEFAULT_SERVER_PRINCIPAL);
        }
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
    }

    /**
     * Entry point: builds the JDBC URL (direct HS2 or ZooKeeper discovery,
     * optionally with a Kerberos principal), then runs a create/select/drop
     * round-trip against Hive.
     *
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException           on JDBC errors while closing the connection
     * @throws IOException            if the client properties file cannot be read
     */
    public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {
        logger.info("Starting HiveExample application...");
        // Load configuration
        init();
        // Each HQL statement must be a single statement and must not contain ";"
        String[] sqls = {
                "create table if not exists hive_test (id int, name string) row format delimited fields terminated by ',' ",
                "SELECT * FROM hive_test",
                "DROP TABLE hive_test"
        };
        // Assemble the JDBC URL
        StringBuilder strBuilder = new StringBuilder("jdbc:hive2://");
        if (isZookeeper) {
            // Connect via ZooKeeper service discovery
            strBuilder.append(zkQuorum)
                    .append("/")
                    .append(zooKeeperNamespace)
                    .append(";serviceDiscoveryMode=")
                    .append(serviceDiscoveryMode)
                    .append(";zooKeeperNamespace=")
                    .append(zooKeeperNamespace);
        } else {
            // Connect directly to a HiveServer2 instance
            strBuilder.append(host)
                    .append(":")
                    .append(port)
                    .append("/default");
        }
        if ("KERBEROS".equalsIgnoreCase(auth)) {
            strBuilder
                    .append(";principal=")
                    .append(principal)
                    .append("");
        } else {
            /* simple (non-Kerberos) mode */
            strBuilder.append(";");
        }

        String url = strBuilder.toString();
        Class.forName(HIVE_DRIVER);
        Connection connection = null;
        try {
            // Open the JDBC connection
            connection = DriverManager.getConnection(url, USER_NAME, "");
            execDDL(connection, sqls[0]);
            logger.info("Create table success!");
            // Query
            execDML(connection, sqls[1]);
            // Drop the table
            execDDL(connection, sqls[2]);
            logger.info("Delete table success!");
        } catch (SQLException e) {
            // Log the full exception so the stack trace is not lost
            logger.error("Create connection failed : " + e.getMessage(), e);
        } finally {
            // Close the JDBC connection
            if (null != connection) {
                connection.close();
            }
        }
    }

    /**
     * Executes a single DDL statement (no result set expected).
     *
     * @param connection open JDBC connection
     * @param sql        a single HQL statement without a trailing ";"
     * @throws SQLException if statement preparation or execution fails
     */
    public static void execDDL(Connection connection, String sql) throws SQLException {
        // try-with-resources closes the statement even when execute() throws
        try (PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.execute();
        }
    }

    /**
     * Executes a query and logs the column labels followed by every row,
     * tab-separated, via the class logger.
     *
     * @param connection open JDBC connection
     * @param sql        a single HQL query without a trailing ";"
     * @throws SQLException if statement preparation, execution, or iteration fails
     */
    public static void execDML(Connection connection, String sql) throws SQLException {
        try (PreparedStatement statement = connection.prepareStatement(sql);
             ResultSet resultSet = statement.executeQuery()) {

            // Log the column labels
            ResultSetMetaData resultMetaData = resultSet.getMetaData();
            int columnCount = resultMetaData.getColumnCount();
            StringBuilder header = new StringBuilder();
            for (int i = 1; i <= columnCount; i++) {
                header.append(resultMetaData.getColumnLabel(i)).append('\t');
            }
            logger.info(header.toString());

            // Log each result row
            while (resultSet.next()) {
                StringBuilder row = new StringBuilder();
                for (int i = 1; i <= columnCount; i++) {
                    row.append(resultSet.getString(i)).append('\t');
                }
                logger.info(row.toString());
            }
        }
    }
}