Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
F
ffm-baseline
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ML
ffm-baseline
Commits
3e814f90
Commit
3e814f90
authored
6 years ago
by
张彦钊
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
change test file
parent
94338b4a
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
19 additions
and
19 deletions
+19
-19
hello.py
hello.py
+19
-19
No files found.
hello.py
View file @
3e814f90
...
...
def model(rdd):
    """Filter one micro-batch RDD down to the records accepted by ``maidian``.

    This function is used as the callback of ``DStream.transform`` (see the
    driver block below), so it must return an RDD for the downstream stream
    to be usable.

    Parameters
    ----------
    rdd : pyspark.RDD
        One micro-batch of raw Kafka records.

    Returns
    -------
    pyspark.RDD or None
        The repartitioned, filtered RDD; ``None`` only if building the
        (lazy) transformation chain itself raised.
    """
    try:
        # BUGFIX: the original built two filter chains and discarded both
        # results (Spark transformations are lazy, so nothing ever ran),
        # then implicitly returned None — which makes stream.transform()
        # produce a broken DStream. Build the chain once and return it.
        return rdd.repartition(10).filter(lambda x: maidian(x))
        # Possible continuation kept from the original, still disabled:
        # .map(lambda x: get_data(x)).na.drop().\
        # groupByKey().map(lambda x, y: write_redis(x, y))
    except Exception as e:
        # Best-effort: log and fall through (returns None), matching the
        # original's swallow-and-print error handling.
        print(e)
if __name__ == '__main__':
    # Driver entry point (first version shown in this diff; here the
    # debugging pprint() is commented out).
    # Connect to the standalone Spark master at nvwa01 and use the lzf
    # codec for shuffle/IO compression.
    sc = SparkContext(conf=SparkConf().setMaster("spark://nvwa01:7077")
                      .setAppName("dislike_filter")
                      .set("spark.io.compression.codec", "lzf"))
    # 10-second micro-batch interval.
    ssc = StreamingContext(sc, 10)
    sc.setLogLevel("WARN")
    # Direct-stream consumer configuration; "largest" starts from the
    # newest offsets when no committed offset exists for group "dislike".
    kafkaParams = {"metadata.broker.list": "172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092",
                   "group.id": "dislike",
                   "socket.timeout.ms": "600000",
                   "auto.offset.reset": "largest"}
    try:
        # gbk_decoder is a project-local helper — presumably decodes
        # GBK-encoded keys/values; verify against its definition.
        stream = KafkaUtils.createDirectStream(ssc, ["gm-maidian-data"], kafkaParams,
                                               keyDecoder=gbk_decoder,
                                               valueDecoder=gbk_decoder)
        # Apply model() to every micro-batch RDD.
        transformstream = stream.transform(lambda x: model(x))
        # transformstream.pprint()
        # Start the streaming job and block until stopped or it fails.
        ssc.start()
        ssc.awaitTermination()
    except Exception as e:
        # Best-effort top-level handler: print and exit quietly.
        print(e)
# if __name__ == '__main__':
# NOTE(review): in this version of the diff the __main__ guard is commented
# out, so (as reconstructed here) the driver code runs at module import
# level — confirm against the repository whether this was intentional for
# spark-submit testing ("change test file" commit).
# Connect to the standalone Spark master at nvwa01 and use the lzf codec
# for shuffle/IO compression.
sc = SparkContext(conf=SparkConf().setMaster("spark://nvwa01:7077")
                  .setAppName("dislike_filter")
                  .set("spark.io.compression.codec", "lzf"))
# 10-second micro-batch interval.
ssc = StreamingContext(sc, 10)
sc.setLogLevel("WARN")
# Direct-stream consumer configuration; "largest" starts from the newest
# offsets when no committed offset exists for group "dislike".
kafkaParams = {"metadata.broker.list": "172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092",
               "group.id": "dislike",
               "socket.timeout.ms": "600000",
               "auto.offset.reset": "largest"}
try:
    # gbk_decoder is a project-local helper — presumably decodes
    # GBK-encoded keys/values; verify against its definition.
    stream = KafkaUtils.createDirectStream(ssc, ["gm-maidian-data"], kafkaParams,
                                           keyDecoder=gbk_decoder,
                                           valueDecoder=gbk_decoder)
    # Apply model() to every micro-batch RDD, and print a sample of each
    # batch for debugging (pprint is active in this version).
    transformstream = stream.transform(lambda x: model(x))
    transformstream.pprint()
    # Start the streaming job and block until stopped or it fails.
    ssc.start()
    ssc.awaitTermination()
except Exception as e:
    # Best-effort top-level handler: print the failure; an email alert
    # was considered and left disabled.
    print(e)
    # send_email(sc.appName, sc.applicationId, e)
This diff is collapsed.
Click to expand it.
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment