Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
S
streamingUserPortrait
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
赵威
streamingUserPortrait
Commits
da90b6b6
Commit
da90b6b6
authored
Feb 04, 2021
by
赵威
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
decouple kafka
parent
dc7a678b
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
48 additions
and
8 deletions
+48
-8
Main.scala
src/main/scala/com.gmei.up/Main.scala
+46
-8
ES.scala
src/main/scala/com.gmei.up/utils/ES.scala
+1
-0
User.scala
src/main/scala/com.gmei.up/utils/User.scala
+1
-0
No files found.
src/main/scala/com.gmei.up/Main.scala
View file @
da90b6b6
...
...
@@ -3,21 +3,59 @@ package com.gmei.up
import
java.util.Properties
import
org.apache.flink.api.scala._
import
org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import
org.apache.flink.streaming.util.serialization.SimpleStringSchema
import
org.apache.flink.streaming.connectors.kafka.
{
FlinkKafkaConsumer
,
FlinkKafkaProducer
}
import
org.apache.flink.streaming.connectors.kafka.
{
FlinkKafkaConsumer
,
FlinkKafkaProducer
,
KafkaDeserializationSchema
}
import
org.apache.flink.api.common.typeinfo.
{
TypeHint
,
TypeInformation
,
Types
}
import
org.apache.kafka.clients.consumer.ConsumerRecord
/**
 * A user-behavior event consumed from the Kafka topic
 * `gm-portrait-update-device` (see the consumer wiring in `Main`).
 *
 * NOTE(review): `Array` fields use reference equality, so two
 * `KafkaUserInfo` values with identical contents are NOT `==` equal and
 * are unsafe as map/set keys. Prefer `Seq[String]` if structural equality
 * is ever needed; kept as `Array` here to preserve the existing interface.
 *
 * @param deviceId      device identifier string
 * @param action        action name string
 * @param logTime       event log time — units not visible here
 *                      (presumably an epoch timestamp; confirm with the producer)
 * @param eventCn       event name (Chinese display name, judging by the suffix
 *                      `Cn` — confirm against upstream schema)
 * @param secondDemands tag list attached to the event; semantics defined by the
 *                      producer, not visible in this file
 * @param projects      project tag list attached to the event; semantics
 *                      defined by the producer, not visible in this file
 */
final case class KafkaUserInfo(
    deviceId: String,
    action: String,
    logTime: Double,
    eventCn: String,
    secondDemands: Array[String],
    projects: Array[String]
)
/**
 * Flink Kafka deserialization schema producing [[KafkaUserInfo]] records
 * from raw `ConsumerRecord[Array[Byte], Array[Byte]]` messages.
 *
 * NOTE(review): this is a development stub — `deserialize` ignores the
 * record payload entirely and returns hard-coded placeholder values. The
 * commented-out line inside `deserialize` hints at the intended parsing
 * (topic + UTF-8 value), but it does not match the 6-field constructor;
 * the real wire format must be confirmed before implementing it.
 */
class UserInfoDeserializationSchema extends KafkaDeserializationSchema[KafkaUserInfo] {

  // The Kafka stream is consumed as unbounded: never signal end-of-stream.
  override def isEndOfStream(t: KafkaUserInfo): Boolean = false

  // Invoked once per Kafka record.
  // NOTE(review): the println below runs for EVERY record — debug leftover;
  // remove it (or switch to a rate-limited logger) before production use.
  override def deserialize(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]]): KafkaUserInfo = {
    // KafkaUserInfo(consumerRecord.topic(), new String(consumerRecord.value(), "UTF-8"))
    println(consumerRecord)
    // TODO(review): placeholder record — every field is a dummy value and the
    // payload in consumerRecord.value() is ignored. Parse the real message here.
    KafkaUserInfo("abc", "abc", 123123, "abc", Array("abc", "abc"), Array("abc", "abc"))
  }

  // Type information Flink uses for serialization of the produced elements;
  // Types.GENERIC treats KafkaUserInfo as a generic (non-POJO) type.
  override def getProducedType: TypeInformation[KafkaUserInfo] =
    Types.GENERIC(classOf[KafkaUserInfo])
}
object
Main
{
def
main
(
args
:
Array
[
String
])
:
Unit
=
{
val
env
=
StreamExecutionEnvironment
.
getExecutionEnvironment
val
properties
=
new
Properties
properties
.
setProperty
(
"group.id"
,
"user_portrait_flink_streaming"
)
properties
.
setProperty
(
"bootstrap.servers"
,
"172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092"
)
// TODO read from config
val
kafkaConsumerProperties
=
new
Properties
kafkaConsumerProperties
.
setProperty
(
"group.id"
,
"user_portrait_flink_streaming"
)
kafkaConsumerProperties
.
setProperty
(
"bootstrap.servers"
,
"172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092"
)
val
kafkaConsumer
=
new
FlinkKafkaConsumer
[
String
](
val
kafkaConsumer
=
new
FlinkKafkaConsumer
[
KafkaUserInfo
](
"gm-portrait-update-device"
,
new
SimpleString
Schema
,
p
roperties
new
UserInfoDeserialization
Schema
,
kafkaConsumerP
roperties
)
val
stream
=
env
.
addSource
(
kafkaConsumer
)
...
...
src/main/scala/com.gmei.up/utils/ES.scala
0 → 100644
View file @
da90b6b6
package
com.gmei.up.utils
src/main/scala/com.gmei.up/utils/User.scala
0 → 100644
View file @
da90b6b6
package
com.gmei.up.utils
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment