詹学伟 · Published on 2024-04-21

Integrating Spring Boot with Hive

1. Add pom dependencies

<!-- Druid connection pool dependency -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid-spring-boot-starter</artifactId>
    <version>1.2.5</version>
</dependency>

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-configuration-processor</artifactId>
    <optional>true</optional>
</dependency>

<!-- Hive JDBC dependency; exclusions avoid Jetty, shim, EL, and logging conflicts with Spring Boot -->
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>3.1.2</version>
    <exclusions>
        <exclusion>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-shims</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.glassfish</groupId>
            <artifactId>javax-el</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
        </exclusion>
    </exclusions>
</dependency>
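
The configuration class and controller below also rely on Spring MVC and Lombok. If they are not already in your pom, the following additions are assumed (a sketch; versions are inherited from the Spring Boot parent):

<!-- assumed to be present: web starter for @RestController, Lombok for @Data/@Slf4j -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <optional>true</optional>
</dependency>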

2. YML configuration file

hive:
  url: jdbc:hive2://node3:10000/db_exercise
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  user: root
  password: root

  # Supplementary connection-pool settings, applied to the data source above
  # initial size, minimum, maximum
  initialSize: 1
  minIdle: 3
  maxActive: 20
  # maximum wait time when acquiring a connection, in milliseconds
  maxWait: 60000
  # interval between checks for idle connections that should be closed, in milliseconds
  timeBetweenEvictionRunsMillis: 60000
  # minimum time a connection stays idle in the pool before it can be evicted, in milliseconds
  minEvictableIdleTimeMillis: 30000
  validationQuery: select 1
  testWhileIdle: true
  testOnBorrow: false
  testOnReturn: false
  # enable PSCache and set its size per connection
  poolPreparedStatements: true
  maxPoolPreparedStatementPerConnectionSize: 20
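
Because the prefix is hive rather than spring.datasource, Spring Boot's auto-configuration ignores this block; the configuration class in the next step binds it explicitly (relaxed binding maps driver-class-name onto the driverClassName field). To sanity-check the URL and credentials before wiring the pool, you can run a minimal plain-JDBC check (a sketch; host, database, and credentials are taken from the YAML above, so adjust them to your environment):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveConnectivityCheck {
    public static void main(String[] args) throws Exception {
        // same driver, URL, and credentials as the YAML above
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://node3:10000/db_exercise", "root", "root");
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}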

3. Configuration class

package com.demo.hive.config;

import javax.sql.DataSource;

import lombok.Data;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import com.alibaba.druid.pool.DruidDataSource;

@Data
@Configuration
@ConfigurationProperties(prefix = "hive")
public class HiveDruidConfig {
    private String url;
    private String user;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;

    @Bean(name = "hiveDruidDataSource")
    @Qualifier("hiveDruidDataSource")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(user);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);

        // pool configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        return datasource;
    }

    @Bean(name = "hiveDruidTemplate")
    public JdbcTemplate hiveDruidTemplate(@Qualifier("hiveDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
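
The hiveDruidTemplate bean offers a higher-level alternative to working with the raw DataSource: JdbcTemplate opens and closes pooled connections for you. As a minimal sketch (the HiveQueryService class name is hypothetical), queries can be issued like this:

package com.demo.hive.service;

import java.util.List;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

@Service
public class HiveQueryService {

    private final JdbcTemplate hiveJdbcTemplate;

    public HiveQueryService(@Qualifier("hiveDruidTemplate") JdbcTemplate hiveJdbcTemplate) {
        this.hiveJdbcTemplate = hiveJdbcTemplate;
    }

    /** Returns the table names in the current database; JdbcTemplate manages the connection. */
    public List<String> listTables() {
        return hiveJdbcTemplate.queryForList("show tables", String.class);
    }
}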

4. Controller layer

package com.demo.hive.controller;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

/**
 * Operate Hive through the injected DataSource
 */
@Slf4j
@RestController
public class HiveDataSourceController {

    @Autowired
    @Qualifier("hiveDruidDataSource")
    private DataSource druidDataSource;

    /**
     * Verify that the Spring Boot application started correctly
     */
    @GetMapping("/")
    public String hello() {
        return "hello world";
    }

    /**
     * List all tables in the current Hive database
     */
    @GetMapping("/table/list")
    public List<String> listAllTables() throws SQLException {
        List<String> list = new ArrayList<>();
        String sql = "show tables";
        log.info("Running: {}", sql);
        // try-with-resources returns the connection to the pool instead of leaking it
        try (Connection connection = druidDataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet res = statement.executeQuery(sql)) {
            while (res.next()) {
                list.add(res.getString(1));
            }
        }
        return list;
    }

    /**
     * Query the first ten rows of the table named tableName
     */
    @GetMapping("/table/select")
    public List<String> selectFromTable(String tableName) throws SQLException {
        List<String> list = new ArrayList<>();
        // tableName is concatenated directly into the SQL; validate it upstream,
        // since HiveQL identifiers cannot be bound as PreparedStatement parameters
        String sql = "select * from " + tableName + " limit 10";
        log.info("Running: {}", sql);
        try (Connection connection = druidDataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet res = statement.executeQuery(sql)) {
            int count = res.getMetaData().getColumnCount();
            while (res.next()) {
                StringBuilder str = new StringBuilder();
                for (int i = 1; i < count; i++) {
                    str.append(res.getString(i)).append(" ");
                }
                str.append(res.getString(count));
                log.info("str = {}", str);
                list.add(str.toString());
            }
        }
        return list;
    }
}
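
With the application running on Spring Boot's default port 8080 (assumed here, since no server.port is configured), the endpoints can be exercised from a browser or any HTTP client: GET /table/list returns the table names in db_exercise, and GET /table/select?tableName=<your_table> returns the first ten rows of the given table.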

