Commit 2ef62259 authored by 段英荣

Merge branch 'similar_sort' into 'master'

modify linucb

See merge request alpha/physical!143
parents a7fe5395 611bb111
@@ -83,57 +83,65 @@ class CollectData(object):
         kafka_consumer_obj = KafkaManager.get_kafka_consumer_ins(topic_name)
         for ori_msg in kafka_consumer_obj:
-            logging.info(ori_msg)
             try:
-                raw_val_dict = json.loads(ori_msg.value)
-                if "type" in raw_val_dict and "on_click_feed_topic_card"==raw_val_dict["type"]:
-                    topic_id = raw_val_dict["params"]["business_id"]
-                    device_id = raw_val_dict["device"]["device_id"]
-                    logging.info("consume topic_id:%s,device_id:%s" % (str(topic_id),str(device_id)))
-                    tag_list = list()
-                    sql_query_results = TopicTag.objects.filter(is_online=True,topic_id=topic_id)
-                    for sql_item in sql_query_results:
-                        tag_list.append(sql_item.tag_id)
-                    is_click = 1
-                    is_vote = 0
-                    reward = 1 if is_click or is_vote else 0
-                    logging.info("positive tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (str(device_id), str(topic_id), str(tag_list)))
-                    for tag_id in tag_list:
-                        self.update_user_linucb_tag_info(reward,device_id,tag_id,user_feature)
-                    # update this user's recommended tag data; run after the user's tag behaviour info has been updated
-                    self.update_recommend_tag_list(device_id, user_feature)
-                elif "type" in raw_val_dict and "page_precise_exposure"==raw_val_dict["type"]:
-                    exposure_cards_list = json.loads(raw_val_dict["params"]["exposure_cards"])
-                    device_id = raw_val_dict["device"]["device_id"]
-                    for item in exposure_cards_list:
-                        exposure_topic_id = item["card_id"]
-                        logging.info("consume exposure topic_id:%s,device_id:%s" % (str(exposure_topic_id),str(device_id)))
-                        tag_list = list()
-                        sql_query_results = TopicTag.objects.filter(is_online=True,topic_id=exposure_topic_id)
-                        for sql_item in sql_query_results:
-                            tag_list.append(sql_item.tag_id)
-                        is_click = 0
-                        is_vote = 0
-                        reward = 1 if is_click or is_vote else 0
-                        logging.info("negative tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (str(device_id),str(exposure_topic_id),str(tag_list)))
-                        for tag_id in tag_list:
-                            self.update_user_linucb_tag_info(reward,device_id,tag_id,user_feature)
-                        # update this user's recommended tag data; run after the user's tag behaviour info has been updated
-                        self.update_recommend_tag_list(device_id, user_feature)
-                else:
-                    logging.warning("unknown type msg:%s" % raw_val_dict.get("type","missing type"))
+                logging.info(ori_msg)
+                raw_val_dict = json.loads(ori_msg.value)
+                if "type" in raw_val_dict and "on_click_feed_topic_card" == raw_val_dict["type"]:
+                    topic_id = raw_val_dict["params"]["business_id"]
+                    device_id = raw_val_dict["device"]["device_id"]
+                    logging.info("consume topic_id:%s,device_id:%s" % (str(topic_id), str(device_id)))
+                    tag_list = list()
+                    sql_query_results = TopicTag.objects.filter(is_online=True, topic_id=topic_id)
+                    for sql_item in sql_query_results:
+                        tag_list.append(sql_item.tag_id)
+                    is_click = 1
+                    is_vote = 0
+                    reward = 1 if is_click or is_vote else 0
+                    logging.info("positive tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (
+                        str(device_id), str(topic_id), str(tag_list)))
+                    for tag_id in tag_list:
+                        self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                    # update this user's recommended tag data; run after the user's tag behaviour info has been updated
+                    self.update_recommend_tag_list(device_id, user_feature)
+                elif "type" in raw_val_dict and "page_precise_exposure" == raw_val_dict["type"]:
+                    exposure_cards_list = json.loads(raw_val_dict["params"]["exposure_cards"])
+                    device_id = raw_val_dict["device"]["device_id"]
+                    for item in exposure_cards_list:
+                        if "card_id" not in item:
+                            continue
+                        exposure_topic_id = item["card_id"]
+                        logging.info(
+                            "consume exposure topic_id:%s,device_id:%s" % (str(exposure_topic_id), str(device_id)))
+                        tag_list = list()
+                        sql_query_results = TopicTag.objects.filter(is_online=True, topic_id=exposure_topic_id)
+                        for sql_item in sql_query_results:
+                            tag_list.append(sql_item.tag_id)
+                        is_click = 0
+                        is_vote = 0
+                        reward = 1 if is_click or is_vote else 0
+                        logging.info("negative tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (
+                            str(device_id), str(exposure_topic_id), str(tag_list)))
+                        for tag_id in tag_list:
+                            self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
+                        # update this user's recommended tag data; run after the user's tag behaviour info has been updated
+                        self.update_recommend_tag_list(device_id, user_feature)
+                else:
+                    logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
             except:
                 logging.error("catch exception,err_msg:%s" % traceback.format_exc())
         return True
......
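The consumer above only derives a binary reward (1 when a feed topic card is clicked, 0 when a card is merely exposed) and hands it, per tag, to self.update_user_linucb_tag_info together with the user_feature context vector. That method's body is outside this hunk; the sketch below is a minimal, assumed implementation of the disjoint-arm LinUCB update it presumably performs, with each tag treated as an arm holding statistics A = I + sum(x xT) and b = sum(reward * x). The class and method names (LinUCBTagModel, update, recommend) and the alpha value are illustrative, not the project's actual API.

import numpy as np

class LinUCBTagModel(object):
    """Illustrative disjoint-arm LinUCB: one (A, b) pair per tag (arm)."""

    def __init__(self, feature_dim, alpha=0.25):
        self.d = feature_dim          # dimension of the user_feature context vector
        self.alpha = alpha            # exploration strength
        self.A = {}                   # tag_id -> d x d matrix, A = I + sum(x x^T)
        self.b = {}                   # tag_id -> d vector,     b = sum(reward * x)

    def _init_arm(self, tag_id):
        if tag_id not in self.A:
            self.A[tag_id] = np.identity(self.d)
            self.b[tag_id] = np.zeros(self.d)

    def update(self, reward, tag_id, user_feature):
        """Per-tag update, mirroring what update_user_linucb_tag_info presumably does."""
        x = np.asarray(user_feature, dtype=float)
        self._init_arm(tag_id)
        self.A[tag_id] += np.outer(x, x)
        self.b[tag_id] += reward * x

    def recommend(self, user_feature, top_n=10):
        """Rank tags by UCB score theta^T x + alpha * sqrt(x^T A^-1 x)."""
        x = np.asarray(user_feature, dtype=float)
        scores = {}
        for tag_id, A in self.A.items():
            A_inv = np.linalg.inv(A)
            theta = A_inv.dot(self.b[tag_id])
            scores[tag_id] = theta.dot(x) + self.alpha * np.sqrt(x.dot(A_inv).dot(x))
        return sorted(scores, key=scores.get, reverse=True)[:top_n]


# Example of the consumer-side calls seen in the diff:
# a click on a feed topic card yields reward 1; a precise exposure alone yields reward 0.
model = LinUCBTagModel(feature_dim=4)
user_feature = [1.0, 0.0, 0.3, 0.7]
for tag_id in [101, 102]:
    model.update(reward=1, tag_id=tag_id, user_feature=user_feature)   # tags of the clicked topic
model.update(reward=0, tag_id=103, user_feature=user_feature)          # tag of an exposed-but-not-clicked topic
print(model.recommend(user_feature, top_n=3))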