Commit 4ce0aa66 authored by 赵建伟

Merge branch 'zhaojianwei' into 'master'

add auth codes

See merge request !5
parents 37c84e7e 6233648d
59 23 * * * mysql -h172.16.30.130 -P3306 -udqmp -pWyUcSd2aPJBy dqmp -e "delete from dqmp.tbl_result_monitor_volatility where partition_date <= '`date -d "30 day ago" +"%Y-%m-%d"`'"
\ No newline at end of file
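The crontab line above prunes tbl_result_monitor_volatility partitions older than 30 days every night at 23:59. Below is a minimal, hedged Java sketch of the same retention cleanup; the MySQL host, port, schema and user are taken from the crontab entry, while the DQMP_DB_PASSWORD environment variable is a hypothetical stand-in for the hard-coded password.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.time.LocalDate;
public class VolatilityRetentionJob {
    public static void main(String[] args) throws Exception {
        // "30 day ago" cutoff, formatted as %Y-%m-%d just like the cron entry
        String cutoff = LocalDate.now().minusDays(30).toString();
        String url = "jdbc:mysql://172.16.30.130:3306/dqmp";
        try (Connection conn = DriverManager.getConnection(url, "dqmp", System.getenv("DQMP_DB_PASSWORD"));
             PreparedStatement ps = conn.prepareStatement(
                     "delete from dqmp.tbl_result_monitor_volatility where partition_date <= ?")) {
            ps.setString(1, cutoff);
            System.out.println("Deleted rows: " + ps.executeUpdate());
        }
    }
}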
......@@ -219,6 +219,10 @@
<!-- xxl-job is pinned to an older Spring Boot version, so the monitoring component (spring-boot-admin) is not integrated here for now -->
<!-- <dependency> <groupId>de.codecentric</groupId> <artifactId>spring-boot-admin-starter-server</artifactId>
<version>2.1.0</version> </dependency> -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
......
......@@ -10,7 +10,7 @@ public class SparkSqlOperator {
public static void main(String[] args) {
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement("select count(*) count from online.test");
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......
package com.gmei.data.dqmp.pool;
import com.alibaba.druid.pool.DruidDataSource;
import jodd.util.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.sql.*;
import java.util.Properties;
/**
* @ClassName HiveDruidPool
* @Author apple
* @Date 2020/4/3
* @Version V1.0
**/
public class HiveDruidPool {
public static Connection conn = null;
private static Properties prop;
private static DruidDataSource hiveDataSource = new DruidDataSource();
private static final Logger log = LoggerFactory.getLogger(HiveDruidPool.class);
/**
* Get a connection from the Druid pool.
* @return
*/
public static Connection getConnection(){
try {
hiveDataSource = getHiveDataSource();
conn = hiveDataSource.getConnection();
} catch (SQLException e) {
log.error("--"+e+":获取Hive连接失败!");
}
return conn;
}
/**
* Release the shared connection.
*/
public static void releaseConnection(){
try {
if(conn != null){
conn.close();
}
} catch (SQLException e) {
log.error("--"+e+":关闭Hive-conn连接失败!");
}
}
private static DruidDataSource getHiveDataSource() {
if(hiveDataSource.isInited()){
return hiveDataSource;
}
try {
Properties dsProp = getProperties("jdbc.properties");
//Basic properties: url, user, password
hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url"));
hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username"));
hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password"));
//Pool sizing: initial size, min idle, max active
hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize")));
hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle")));
hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive")));
//Max wait time (ms) when acquiring a connection
hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait")));
//Interval (ms) between eviction runs that detect and close idle connections
hiveDataSource.setTimeBetweenEvictionRunsMillis(60000);
//Minimum time (ms) a connection stays idle in the pool before it can be evicted
hiveDataSource.setMinEvictableIdleTimeMillis(300000);
hiveDataSource.setTestWhileIdle(false);
//Enable PSCache and set its size per connection
hiveDataSource.setPoolPreparedStatements(true);
hiveDataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
hiveDataSource.init();
} catch (SQLException e) {
e.printStackTrace();
closeHiveDataSource();
}
return hiveDataSource;
}
/**
*@Description: Close the Hive connection pool
*/
private static void closeHiveDataSource(){
if(hiveDataSource != null){
hiveDataSource.close();
}
}
/**
* Load the properties file from the classpath
* @param path
* @return
*/
private static Properties getProperties(String path) {
Properties properties = new Properties();
try {
InputStream inputStream = HiveDruidPool.class.getClassLoader().getResourceAsStream(path);
properties.load(inputStream);
inputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return properties;
}
public static void main(String[] args) throws Exception {
DataSource ds = HiveDruidPool.getHiveDataSource();
Connection conn = ds.getConnection();
Statement stmt = null;
if(conn == null){
System.out.println("null");
}else{
System.out.println("conn");
stmt = conn.createStatement();
ResultSet res = stmt.executeQuery("select * from xxxx t");
int i = 0;
while(res.next()){
if(i<10){
System.out.println(res.getString(1));
i++;
}
}
// close resources only when a connection was actually obtained
stmt.close();
conn.close();
}
}
}
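A hedged usage sketch for HiveDruidPool as defined above. Because getConnection() also stores the handle in the shared static conn field, concurrent callers can overwrite each other's reference; acquiring a connection per call and closing it (which returns it to the Druid pool) avoids that. The table name is a caller-supplied placeholder.

public class HiveDruidPoolUsage {
    public static long countRows(String table) {
        // table is a placeholder supplied by the caller, e.g. "online.test"
        String sql = "select count(*) from " + table;
        try (java.sql.Connection conn = HiveDruidPool.getConnection();
             java.sql.PreparedStatement ps = conn.prepareStatement(sql);
             java.sql.ResultSet rs = ps.executeQuery()) {
            return rs.next() ? rs.getLong(1) : 0L;
        } catch (java.sql.SQLException e) {
            throw new RuntimeException("Hive query failed", e);
        }
    }
}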
package com.gmei.data.dqmp.pool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.Properties;
public class JdbcConnectPool {
private static final Logger logger = LoggerFactory.getLogger(JdbcConnectPool.class);
private static Properties prop;
private static int currentsize = 0;
private static LinkedList<Connection> connList = new LinkedList<Connection>();
private static int requestCount = 0;
static {
prop = getProperties("jdbc.properties");
try {
......@@ -26,31 +22,11 @@ public class JdbcConnectPool {
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
for (int i = 0; i < Integer.valueOf(prop.getProperty("initSize")); i++) {
Connection connection = createConnect();
connList.add(connection);
currentsize++;
}
}
/**
* Get a connection
*
* @return
*/
public static Connection getConnect() {
Connection connection = null;
if (connList.size() > 0) {
connection = connList.getFirst();
connList.removeFirst();
} else if (connList.size() == 0 && currentsize < Integer.valueOf(prop.getProperty("maxSize"))) {
connList.addLast(createConnect());
connection = connList.getFirst();
connList.removeFirst();
currentsize++;
}
logger.info("Current pool size : {}",currentsize);
return connection;
public static Connection getConnection() {
logger.info("requestCount : {}",++requestCount);
return createConnect();
}
/**
......@@ -61,7 +37,9 @@ public class JdbcConnectPool {
private static Connection createConnect() {
Connection conn = null;
try {
conn = DriverManager.getConnection(prop.getProperty("url"), prop.getProperty("username"),
conn = DriverManager.getConnection(
prop.getProperty("url"),
prop.getProperty("username"),
prop.getProperty("password"));
} catch (SQLException e) {
e.printStackTrace();
......@@ -71,11 +49,14 @@ public class JdbcConnectPool {
/**
* Release the connection
*
* @param connection
*/
public static void releaseConnection(Connection connection) {
connList.addLast(connection);
public static void releaseConnection(Connection conn) {
try{
conn.close();
}catch (Exception e){
conn = null;
e.printStackTrace();
}
}
/**
......
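After this refactor JdbcConnectPool no longer recycles connections: getConnection() opens a fresh connection per request and releaseConnection(conn) simply closes it. A hedged sketch of the calling pattern that change implies, reusing the count query from SparkSqlOperator above:

public class JdbcConnectPoolUsage {
    public static void main(String[] args) {
        java.sql.Connection conn = null;
        try {
            conn = JdbcConnectPool.getConnection();
            try (java.sql.PreparedStatement ps =
                         conn.prepareStatement("select count(*) count from online.test");
                 java.sql.ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getLong("count"));
                }
            }
        } catch (java.sql.SQLException e) {
            e.printStackTrace();
        } finally {
            // always give the connection back; with this version that just closes it
            if (conn != null) {
                JdbcConnectPool.releaseConnection(conn);
            }
        }
    }
}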
package com.gmei.data.dqmp.pool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.Properties;
public class JdbcConnectPoolBak01 {
private static final Logger logger = LoggerFactory.getLogger(JdbcConnectPoolBak01.class);
private static Properties prop;
private static int requestCount = 0;
private static LinkedList<Connection> connList = new LinkedList<Connection>();
static {
prop = getProperties("jdbc.properties");
try {
Class.forName(prop.getProperty("driverClassName"));
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
for (int i = 0; i < Integer.valueOf(prop.getProperty("initSize")); i++) {
Connection connection = createConnect();
connList.add(connection);
}
}
/**
* Get a connection
*
* @return
*/
public static Connection getConnect00() {
Connection connection = null;
if (connList.size() > 0) {
connection = connList.getFirst();
connList.removeFirst();
} else{
while(connList.size() < Integer.valueOf(prop.getProperty("maxSize"))){
connList.addLast(createConnect());
}
connection = getConnect00();
}
requestCount ++;
logger.info("requestCount : {}",requestCount);
logger.info("connList size : {}",connList.size());
return connection;
}
public static Connection getConnection() {
Connection connection = null;
if (connList.size() > 0) {
connection = connList.getFirst();
connList.removeFirst();
} else{
connList.addLast(createConnect());
connection = getConnection();
}
logger.info("requestCount : {}",++requestCount);
logger.info("connList size : {}",connList.size());
return connection;
}
/**
* Create a connection
*
* @return
*/
private static Connection createConnect() {
Connection conn = null;
try {
conn = DriverManager.getConnection(
prop.getProperty("url"),
prop.getProperty("username"),
prop.getProperty("password"));
} catch (SQLException e) {
e.printStackTrace();
}
return conn;
}
/**
* Release the connection
*
* @param connection
*/
public static void releaseConnection(Connection connection) {
connList.addLast(connection);
}
/**
* Load the properties file from the classpath
*
* @param path
* @return
*/
private static Properties getProperties(String path) {
Properties properties = new Properties();
try {
InputStream inputStream = JdbcConnectPoolBak01.class.getClassLoader().getResourceAsStream(path);
properties.load(inputStream);
inputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return properties;
}
}
package com.gmei.data.dqmp.pool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.Properties;
public class JdbcConnectPoolBak02 {
private static final Logger logger = LoggerFactory.getLogger(JdbcConnectPoolBak02.class);
private static Properties prop;
private static int requestCount = 0;
private static LinkedList<Connection> connList = new LinkedList<Connection>();
static {
prop = getProperties("jdbc.properties");
try {
Class.forName(prop.getProperty("driverClassName"));
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
public static Connection getConnection() {
Connection connection = null;
if (connList.size() > 0) {
connection = connList.getFirst();
connList.removeFirst();
} else{
connList.addLast(createConnect());
connection = getConnection();
}
logger.info("requestCount : {}",++requestCount);
logger.info("connList size : {}",connList.size());
return connection;
}
/**
* Create a connection
*
* @return
*/
private static Connection createConnect() {
Connection conn = null;
try {
conn = DriverManager.getConnection(
prop.getProperty("url"),
prop.getProperty("username"),
prop.getProperty("password"));
} catch (SQLException e) {
e.printStackTrace();
}
return conn;
}
/**
* Release the connection
*
* @param connection
*/
public static void releaseConnection(Connection connection) {
connList.addLast(connection);
}
/**
* Load the properties file from the classpath
*
* @param path
* @return
*/
private static Properties getProperties(String path) {
Properties properties = new Properties();
try {
InputStream inputStream = JdbcConnectPoolBak02.class.getClassLoader().getResourceAsStream(path);
properties.load(inputStream);
inputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return properties;
}
}
......@@ -184,7 +184,7 @@ public class CheckServiceImpl implements CheckService {
return tblResultCheckUnique;
}
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......@@ -221,7 +221,7 @@ public class CheckServiceImpl implements CheckService {
return tblResultCheckUnblank;
}
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......@@ -258,7 +258,7 @@ public class CheckServiceImpl implements CheckService {
return tblResultCheckRefer;
}
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......
......@@ -147,7 +147,7 @@ public class MonitorServiceImpl implements MonitorService {
return tblResultMonitorVolatility;
}
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......@@ -186,7 +186,7 @@ public class MonitorServiceImpl implements MonitorService {
return tblResultMonitorSpecial;
}
try {
Connection conn = JdbcConnectPool.getConnect();
Connection conn = JdbcConnectPool.getConnection();
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
......
package com.gmei.data.dqmp.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;
public class JdbcUtils {
private static final Logger logger = LoggerFactory.getLogger(JdbcUtils.class);
private static Properties prop;
static {
prop = getProperties("jdbc.properties");
try {
Class.forName(prop.getProperty("driverClassName"));
} catch (ClassNotFoundException e) {
e.printStackTrace();
logger.error(e.getMessage());
}
}
public static Connection getConnection() throws Exception {
Connection connection = DriverManager.getConnection(
prop.getProperty("url"),
prop.getProperty("username"),
prop.getProperty("password"));
return connection;
}
/**
* Load the properties file from the classpath
*
* @param path
* @return
*/
private static Properties getProperties(String path) {
Properties properties = new Properties();
try {
InputStream inputStream = JdbcUtils.class.getClassLoader().getResourceAsStream(path);
properties.load(inputStream);
inputStream.close();
} catch (Exception e) {
e.printStackTrace();
logger.error(e.getMessage());
}
return properties;
}
}
......@@ -95,3 +95,9 @@ xxl:
port: 9797
logpath: /tmp
logretentiondays: -1
#---login user config---
security:
user:
name: data
password: wqjYA16LVUurHsVi
\ No newline at end of file
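With spring-boot-starter-security added in the pom above and the security.user block in this profile, requests to the service are expected to require HTTP Basic credentials (the exact property prefix Spring Boot honours depends on its version). A hedged client sketch follows; the URL and the DQMP_LOGIN_PASSWORD environment variable are hypothetical placeholders, while the user name mirrors the config above.

import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
public class BasicAuthClient {
    public static void main(String[] args) throws Exception {
        // user name from the YAML above; password taken from a hypothetical env var
        String credentials = "data:" + System.getenv("DQMP_LOGIN_PASSWORD");
        String token = Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
        HttpURLConnection conn = (HttpURLConnection)
                new URL("http://localhost:8080/").openConnection(); // hypothetical host, port and path
        conn.setRequestProperty("Authorization", "Basic " + token);
        System.out.println("HTTP " + conn.getResponseCode());
    }
}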
......@@ -89,3 +89,9 @@ xxl:
port: 9797
logpath: /tmp
logretentiondays: -1
#---login user config---
security:
user:
name: data
password: wqjYA16LVUurHsVi
\ No newline at end of file
......@@ -89,3 +89,9 @@ xxl:
port: 9797
logpath: /tmp
logretentiondays: -1
#---login user config---
security:
user:
name: data
password: wqjYA16LVUurHsVi
\ No newline at end of file
#base
## base
driverClassName=org.apache.hive.jdbc.HiveDriver
#url=jdbc:hive2://152.136.57.57:10010
url=jdbc:hive2://bj-gm-prod-cos-datacenter007:10010
url=jdbc:hive2://bj-gm-prod-cos-datacenter005:10010
username=data
password=
#other
initSize=10
maxSize=20
## druid pool
hive_jdbc_url=jdbc:hive2://bj-gm-prod-cos-datacenter005:10010
hive.dbname=online
hive_jdbc_username=data
hive_jdbc_password=
# pool sizing: initial size, min idle, max active
hive_initialSize=20
hive_minIdle=20
hive_maxActive=500
# max wait (ms) when acquiring a connection
hive_maxWait=60000
\ No newline at end of file