Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
D
DQMP
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
data
DQMP
Commits
aeefb9db
Commit
aeefb9db
authored
Apr 03, 2020
by
赵建伟
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
update codes
parent
e347faf2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
150 additions
and
3 deletions
+150
-3
HiveDruidPool.java
src/main/java/com/gmei/data/dqmp/pool/HiveDruidPool.java
+136
-0
jdbc.properties
src/main/resources/jdbc.properties
+14
-3
No files found.
src/main/java/com/gmei/data/dqmp/pool/HiveDruidPool.java
0 → 100644
View file @
aeefb9db
package
com
.
gmei
.
data
.
dqmp
.
pool
;
import
com.alibaba.druid.pool.DruidDataSource
;
import
jodd.util.PropertiesUtil
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
javax.sql.DataSource
;
import
java.io.FileNotFoundException
;
import
java.io.IOException
;
import
java.io.InputStream
;
import
java.sql.*
;
import
java.util.Properties
;
/**
 * Druid-backed connection pool for Hive JDBC access.
 *
 * <p>Configuration is read from {@code jdbc.properties} on the classpath
 * (keys: {@code hive_jdbc_url}, {@code hive_jdbc_username},
 * {@code hive_jdbc_password}, {@code hive_initialSize}, {@code hive_minIdle},
 * {@code hive_maxActive}, {@code hive_maxWait}).
 *
 * <p>NOTE(review): all state is static and the lazy initialization in
 * {@link #getHiveDataSource()} is not synchronized, so a concurrent first use
 * could race and double-initialize — confirm callers are single-threaded.
 *
 * @ClassName HiveDruidPool
 * @Author apple
 * @Date 2020/4/3
 * @Version V1.0
 **/
public class HiveDruidPool {

    /**
     * Last connection handed out by {@link #getConnection()}.
     * Kept public/static for existing callers of {@link #releaseConnection()}.
     */
    public static Connection conn = null;

    /** Shared pool; replaced only if re-fetched before {@code init()} succeeds. */
    private static DruidDataSource hiveDataSource = new DruidDataSource();

    private static final Logger log = LoggerFactory.getLogger(HiveDruidPool.class);

    /**
     * Obtains a connection from the Hive Druid pool.
     *
     * @return a pooled {@link Connection}, or {@code null} if none could be
     *         obtained (the failure is logged, not rethrown — preserved from
     *         the original contract)
     */
    public static Connection getConnection() {
        try {
            hiveDataSource = getHiveDataSource();
            conn = hiveDataSource.getConnection();
        } catch (SQLException e) {
            // Pass the exception as the last argument so SLF4J logs the full
            // stack trace (string concatenation loses it).
            log.error("获取Hive连接失败!", e);
        }
        return conn;
    }

    /**
     * Closes the connection most recently obtained via {@link #getConnection()}.
     * Safe to call when no connection is open.
     */
    public static void releaseConnection() {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            log.error("关闭Hive-conn连接失败!", e);
        }
    }

    /**
     * Lazily initializes and returns the shared Druid data source, configured
     * from {@code jdbc.properties}.
     *
     * @return the shared data source (may be uninitialized if setup failed;
     *         the failure is logged and the pool is closed)
     */
    private static DruidDataSource getHiveDataSource() {
        if (hiveDataSource.isInited()) {
            return hiveDataSource;
        }
        try {
            Properties dsProp = getProperties("jdbc.properties");
            // Basic settings: url, user, password.
            hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url"));
            hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username"));
            hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password"));
            // Pool sizing: initial, minimum idle, maximum active.
            hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize")));
            hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle")));
            hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive")));
            // Maximum time (ms) to wait for a connection before failing.
            hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait")));
            // How often (ms) the evictor scans for idle connections to close.
            hiveDataSource.setTimeBetweenEvictionRunsMillis(60000);
            // Minimum time (ms) a connection must stay idle before eviction.
            hiveDataSource.setMinEvictableIdleTimeMillis(300000);
            hiveDataSource.setTestWhileIdle(false);
            // Enable the PreparedStatement cache and cap its per-connection size.
            hiveDataSource.setPoolPreparedStatements(true);
            hiveDataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
            hiveDataSource.init();
        } catch (SQLException | NumberFormatException e) {
            // Was printStackTrace(); also catch NumberFormatException so a bad
            // numeric property can't leave a half-configured, unclosed pool.
            log.error("初始化Hive连接池失败!", e);
            closeHiveDataSource();
        }
        return hiveDataSource;
    }

    /**
     * Closes the shared Druid pool, releasing all pooled connections.
     */
    private static void closeHiveDataSource() {
        if (hiveDataSource != null) {
            hiveDataSource.close();
        }
    }

    /**
     * Loads a properties file from the classpath.
     *
     * @param path classpath-relative resource name, e.g. {@code jdbc.properties}
     * @return the loaded properties; empty if the resource is missing or unreadable
     */
    private static Properties getProperties(String path) {
        Properties properties = new Properties();
        // Fix: resolve via this class (the original used JdbcConnectPool.class,
        // a copy-paste from another pool), close the stream with
        // try-with-resources, and guard against a missing resource (null stream
        // would have thrown NPE in properties.load).
        try (InputStream inputStream =
                HiveDruidPool.class.getClassLoader().getResourceAsStream(path)) {
            if (inputStream == null) {
                log.error("Properties file not found on classpath: {}", path);
            } else {
                properties.load(inputStream);
            }
        } catch (IOException e) {
            // FileNotFoundException is a subclass of IOException; one catch suffices.
            log.error("Failed to load properties file: {}", path, e);
        }
        return properties;
    }

    /**
     * Smoke test: open a connection and print the first column of up to ten rows.
     */
    public static void main(String[] args) throws Exception {
        DataSource ds = HiveDruidPool.getHiveDataSource();
        // try-with-resources closes connection/statement/result set even on
        // error; the original NPE'd on stmt.close() whenever conn was null,
        // and never closed the ResultSet.
        try (Connection conn = ds.getConnection()) {
            if (conn == null) {
                System.out.println("null");
                return;
            }
            System.out.println("conn");
            try (Statement stmt = conn.createStatement();
                 ResultSet res = stmt.executeQuery("select * from xxxx t")) {
                int i = 0;
                // Stop after ten rows instead of draining the whole result set.
                while (res.next() && i < 10) {
                    System.out.println(res.getString(1));
                    i++;
                }
            }
        }
    }
}
src/main/resources/jdbc.properties
View file @
aeefb9db
#base
#
#
base
driverClassName
=
org.apache.hive.jdbc.HiveDriver
driverClassName
=
org.apache.hive.jdbc.HiveDriver
#url=jdbc:hive2://152.136.57.57:10010
#url=jdbc:hive2://152.136.57.57:10010
url
=
jdbc:hive2://bj-gm-prod-cos-datacenter005:10010
url
=
jdbc:hive2://bj-gm-prod-cos-datacenter005:10010
username
=
data
username
=
data
password
=
password
=
#other
initSize
=
10
initSize
=
10
maxSize
=
20
maxSize
=
20
## druid pool
hive_jdbc_url
=
jdbc:hive2://bj-gm-prod-cos-datacenter005:10010
hive.dbname
=
online
hive_jdbc_username
=
data
hive_jdbc_password
=
#配置初始化大小、最小、最大
hive_initialSize
=
20
hive_minIdle
=
20
hive_maxActive
=
500
#配置获取连接等待超时的时间
hive_maxWait
=
60000
\ No newline at end of file
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment