Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
F
flink_warehouse_rt
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
data
flink_warehouse_rt
Commits
2db50ff2
Commit
2db50ff2
authored
Jan 09, 2020
by
刘喆
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
add checkpoint and uid and parallelism
parent
8ec1327e
Expand all
Show whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
58 additions
and
13 deletions
+58
-13
MlPreciseExposureDao.java
src/main/java/com/gmei/cache/MlPreciseExposureDao.java
+1
-0
BlPreciseExposureProcessFunction.java
...a/com/gmei/function/BlPreciseExposureProcessFunction.java
+2
-2
BlPreciseExposureWatermark.java
...in/java/com/gmei/function/BlPreciseExposureWatermark.java
+34
-0
BlMaiDianKafkaSource.java
src/main/java/com/gmei/source/BlMaiDianKafkaSource.java
+17
-10
PreciseExposureStreaming.java
...ain/java/com/gmei/streaming/PreciseExposureStreaming.java
+0
-0
BlMaiDianKafkaSourceTest.java
src/test/java/com/gmei/source/BlMaiDianKafkaSourceTest.java
+4
-1
No files found.
src/main/java/com/gmei/cache/MlPreciseExposureDao.java
View file @
2db50ff2
...
...
@@ -108,6 +108,7 @@ public class MlPreciseExposureDao {
" ?,\n"
+
" ?,\n"
+
" ?,\n"
+
" ?,\n"
+
" ?)"
;
List
<
Object
>
params
=
new
ArrayList
<
Object
>();
...
...
src/main/java/com/gmei/function/Bl
DistinctProcessAllWindow
Function.java
→
src/main/java/com/gmei/function/Bl
PreciseExposureProcess
Function.java
View file @
2db50ff2
...
...
@@ -11,7 +11,7 @@ import java.util.Map;
import
java.util.Set
;
/**
* ClassName: Bl
DistinctProcessAllWindow
Function
* ClassName: Bl
PreciseExposureProcess
Function
* Function:
* Reason: BL层数据去重器
* Date: 2020/1/8 下午5:06
...
...
@@ -19,7 +19,7 @@ import java.util.Set;
* @author liuzhe
* @since JDK 1.8
*/
public
class
Bl
DistinctProcessAllWindow
Function
extends
ProcessAllWindowFunction
<
BlPreciseExposureBean
,
BlPreciseExposureBean
,
TimeWindow
>
{
public
class
Bl
PreciseExposureProcess
Function
extends
ProcessAllWindowFunction
<
BlPreciseExposureBean
,
BlPreciseExposureBean
,
TimeWindow
>
{
@Override
public
void
process
(
Context
context
,
Iterable
<
BlPreciseExposureBean
>
iterable
,
Collector
<
BlPreciseExposureBean
>
collector
)
throws
Exception
{
Iterator
<
BlPreciseExposureBean
>
blPreciseExposureBeanIterator
=
iterable
.
iterator
();
...
...
src/main/java/com/gmei/function/BlPreciseExposureWatermark.java
0 → 100644
View file @
2db50ff2
package com.gmei.function;

import com.gmei.bean.bl.BlPreciseExposureBean;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;

import javax.annotation.Nullable;

/**
 * ClassName: BlPreciseExposureWatermark
 * Function: Periodic watermark assigner for {@link BlPreciseExposureBean} events.
 *           Event time is taken from the bean's {@code gm_nginx_timestamp} field
 *           (a fractional epoch-seconds string), converted to epoch milliseconds.
 *           Watermarks trail the maximum seen timestamp by a fixed
 *           out-of-orderness bound (10 seconds).
 * Reason: Drives event-time windowing for the precise-exposure stream.
 * Date: 2020/1/8 下午8:40
 *
 * @author liuzhe
 * @since JDK 1.8
 */
public class BlPreciseExposureWatermark implements AssignerWithPeriodicWatermarks<BlPreciseExposureBean> {

    /** Allowed lateness in milliseconds; watermark = max timestamp seen - this bound. */
    private static final long MAX_OUT_OF_ORDERNESS = 10000L;

    /** Highest event timestamp (epoch millis) observed so far. */
    private long currentMaxTimestamp;

    /**
     * Extracts the event timestamp from the bean and advances the running maximum.
     *
     * @param blPreciseExposureBean the incoming event; its {@code gm_nginx_timestamp}
     *                              is parsed as fractional epoch seconds
     *                              (NOTE(review): throws NumberFormatException on
     *                              malformed input — confirm upstream guarantees a
     *                              parseable value)
     * @param l                     the previous element timestamp (unused)
     * @return the event timestamp in epoch milliseconds
     */
    @Override
    public long extractTimestamp(BlPreciseExposureBean blPreciseExposureBean, long l) {
        // Primitive parse + cast instead of the deprecated new Double(...).longValue()
        // boxing dance; seconds -> milliseconds.
        double timestampSeconds = Double.parseDouble(blPreciseExposureBean.getGm_nginx_timestamp());
        long timestamp = (long) (timestampSeconds * 1000);
        currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp);
        return timestamp;
    }

    /**
     * Emits the current watermark: the maximum observed timestamp minus the
     * out-of-orderness bound. Before any element arrives this is negative,
     * which Flink treats as "no progress yet".
     *
     * @return the current watermark, never {@code null}
     */
    @Nullable
    @Override
    public Watermark getCurrentWatermark() {
        return new Watermark(currentMaxTimestamp - MAX_OUT_OF_ORDERNESS);
    }
}
src/main/java/com/gmei/source/BlMaiDianKafkaSource.java
View file @
2db50ff2
...
...
@@ -6,6 +6,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import
org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011
;
import
java.nio.charset.Charset
;
import
java.text.SimpleDateFormat
;
import
java.util.Properties
;
...
...
@@ -23,15 +24,17 @@ public class BlMaiDianKafkaSource {
private
String
brokers
;
private
String
topic
;
private
String
groupId
;
private
String
startTime
;
public
BlMaiDianKafkaSource
()
{
}
public
BlMaiDianKafkaSource
(
String
brokers
,
String
topic
,
String
groupId
)
{
public
BlMaiDianKafkaSource
(
String
brokers
,
String
topic
,
String
groupId
,
String
startTime
)
{
this
.
brokers
=
brokers
;
this
.
topic
=
topic
;
this
.
groupId
=
groupId
;
this
.
startTime
=
startTime
;
}
/**
...
...
@@ -42,7 +45,7 @@ public class BlMaiDianKafkaSource {
* @author liuzhe
* @since JDK 1.8
*/
public
FlinkKafkaConsumer011
<
String
>
addSource
()
{
public
FlinkKafkaConsumer011
<
String
>
addSource
()
throws
Exception
{
Properties
props
=
new
Properties
();
props
.
put
(
"bootstrap.servers"
,
brokers
);
props
.
put
(
"group.id"
,
groupId
);
...
...
@@ -51,10 +54,14 @@ public class BlMaiDianKafkaSource {
// props.put("auto.offset.reset", "earliest");
props
.
put
(
"key.deserializer"
,
"org.apache.kafka.common.serialization.StringDeserializer"
);
props
.
put
(
"value.deserializer"
,
"org.apache.kafka.common.serialization.StringDeserializer"
);
FlinkKafkaConsumer011
<
String
>
myConsumer
=
new
FlinkKafkaConsumer011
<
String
>(
topic
,
new
SimpleStringSchema
(
Charset
.
forName
(
"UTF-8"
)),
props
);
myConsumer
.
setStartFromGroupOffsets
();
//默认消费策略
// myConsumer.setStartFromEarliest();
return
myConsumer
;
FlinkKafkaConsumer011
<
String
>
flinkKafkaConsumer
=
new
FlinkKafkaConsumer011
<
String
>(
topic
,
new
SimpleStringSchema
(
Charset
.
forName
(
"UTF-8"
)),
props
);
// flinkKafkaConsumer.setStartFromGroupOffsets();//默认消费策略
if
(
startTime
!=
null
){
SimpleDateFormat
simpleDateFormat
=
new
SimpleDateFormat
(
"yyyy-MM-dd HH:mm:ss"
);
flinkKafkaConsumer
.
setStartFromTimestamp
(
simpleDateFormat
.
parse
(
startTime
).
getTime
());
}
// flinkKafkaConsumer.setStartFromEarliest();
return
flinkKafkaConsumer
;
}
public
DataStreamSource
<
String
>
addSource
(
StreamExecutionEnvironment
streamExecutionEnvironment
)
{
...
...
@@ -71,10 +78,10 @@ public class BlMaiDianKafkaSource {
props
.
put
(
"key.deserializer"
,
"org.apache.kafka.common.serialization.StringDeserializer"
);
props
.
put
(
"value.deserializer"
,
"org.apache.kafka.common.serialization.StringDeserializer"
);
FlinkKafkaConsumer011
<
String
>
my
Consumer
=
new
FlinkKafkaConsumer011
<
String
>(
topicName
,
new
SimpleStringSchema
(),
props
);
my
Consumer
.
setStartFromGroupOffsets
();
//默认消费策略
//
my
Consumer.setStartFromEarliest();
DataStreamSource
<
String
>
dataStreamSource
=
env
.
addSource
(
my
Consumer
);
FlinkKafkaConsumer011
<
String
>
flinkKafka
Consumer
=
new
FlinkKafkaConsumer011
<
String
>(
topicName
,
new
SimpleStringSchema
(),
props
);
flinkKafka
Consumer
.
setStartFromGroupOffsets
();
//默认消费策略
//
flinkKafka
Consumer.setStartFromEarliest();
DataStreamSource
<
String
>
dataStreamSource
=
env
.
addSource
(
flinkKafka
Consumer
);
return
dataStreamSource
;
}
}
src/main/java/com/gmei/streaming/PreciseExposureStreaming.java
View file @
2db50ff2
This diff is collapsed.
Click to expand it.
src/test/java/com/gmei/source/BlMaiDianKafkaSourceTest.java
View file @
2db50ff2
...
...
@@ -25,8 +25,10 @@ public class BlMaiDianKafkaSourceTest {
//
// env.setStateBackend(new Sta("file:///Users/mac/opt/flink/checkpoints",true));
String
brokers
=
"172.16.44.25:2181/gengmei"
;
String
topicName
=
"test"
;
String
groupId
=
"group1"
;
String
startTime
=
"2019-12-31 00:01:02"
;
//构造java.util.Properties对象
Properties
props
=
new
Properties
();
// 必须指定属性。
...
...
@@ -45,9 +47,10 @@ public class BlMaiDianKafkaSourceTest {
props
.
put
(
"value.deserializer"
,
"org.apache.kafka.common.serialization.StringDeserializer"
);
// FlinkKafkaConsumer011<ObjectNode> MyConsumer = new FlinkKafkaConsumer011<ObjectNode>(topicName, new JSONDeserializationSchema(), props);
FlinkKafkaConsumer011
<
String
>
myConsumer
=
new
FlinkKafkaConsumer011
<
String
>(
topicName
,
new
SimpleStringSchema
(),
props
);
FlinkKafkaConsumer011
<
String
>
myConsumer
=
new
BlMaiDianKafkaSource
(
brokers
,
topicName
,
groupId
,
startTime
).
addSource
(
);
myConsumer
.
setStartFromGroupOffsets
();
//默认消费策略
DataStreamSource
<
String
>
text
=
env
.
addSource
(
myConsumer
);
text
.
print
().
setParallelism
(
1
);
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment