Commit 8c7abb46 authored by 张彦钊

change test file

parent 3fcc201b
@@ -33,12 +33,12 @@ def gbk_decoder(s):
 def maidian(x):
     try:
         data = x[1]
-        if 'type' in data and 'device' in data and 'params' in data and 'card_content_type' in data['params'] \
-                and data['type'] == 'on_click_card' and data['params']['card_content_type'] == 'diary':
-            # and data["device"]['device_id'] == "E417C286-40A4-42F6-BDA9-AEEBD8FEC3B6"
-            print("get device id")
-            return True
+        if 'type' in data and 'device' in data and 'params' in data and 'card_content_type' in data['params']:
+            if data['type'] == 'on_click_card' and data['params']['card_content_type'] == 'diary':
+                # and data["device"]['device_id'] == "E417C286-40A4-42F6-BDA9-AEEBD8FEC3B6"
+                return True
+            else:
+                return False
         else:
             return False
@@ -105,7 +105,7 @@ def model(rdd):
 if __name__ == '__main__':
     sc = SparkContext(conf=SparkConf().setMaster("spark://nvwa01:7077").setAppName("dislike_filter").set(
         "spark.io.compression.codec", "lzf"))
-    ssc = StreamingContext(sc, 10)
+    ssc = StreamingContext(sc, 6)
     sc.setLogLevel("WARN")
     kafkaParams = {"metadata.broker.list": "172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092",
                    "group.id": "dislike",
...
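Note: the hunks above only show the filter predicate and part of the driver setup. Below is a minimal sketch of how maidian plausibly plugs into the Kafka direct stream in this script; the topic name, the use of KafkaUtils.createDirectStream, and the downstream foreachRDD(model) call are assumptions, since that part of the file is outside the visible hunks.

# Hypothetical wiring of the pieces visible in this diff; topic name and
# downstream handling are assumptions, not taken from the actual file.
from pyspark import SparkConf, SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils

sc = SparkContext(conf=SparkConf().setMaster("spark://nvwa01:7077")
                  .setAppName("dislike_filter")
                  .set("spark.io.compression.codec", "lzf"))
sc.setLogLevel("WARN")
ssc = StreamingContext(sc, 6)  # 6-second micro-batches, as set in this commit

kafkaParams = {"metadata.broker.list": "172.16.44.25:9092,172.16.44.31:9092,172.16.44.45:9092",
               "group.id": "dislike"}

# "gm-maidian-data" is a placeholder topic. gbk_decoder and maidian come from
# the file being changed; gbk_decoder is assumed to decode the raw Kafka value
# into a dict, and maidian keeps only on_click_card events on diary cards.
stream = KafkaUtils.createDirectStream(ssc, ["gm-maidian-data"], kafkaParams,
                                       valueDecoder=gbk_decoder)
clicks = stream.filter(maidian)
clicks.foreachRDD(model)  # model(rdd) is defined in the file (second hunk header)

ssc.start()
ssc.awaitTermination()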