Commit 84b22900 authored by 高雅喆

User cold start now uses the light medical aesthetics (轻医美) tags for all traffic; the grayscale rollout experiment has been removed.

parent 62ccacc7
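
For context: the queues written by this script are read on the cold-start serving path. Below is a minimal sketch (not part of this commit; the function name and arguments are hypothetical) of how a consumer could read the light medical aesthetics queues. The Redis key names, the list layout for topic/qa, and the per-city JSON-encoded diary hash follow what the diff below writes.

import json
import redis

def fetch_light_clinic_beauty_candidates(redis_url, city_tag_id, n=10):
    # Hypothetical consumer-side helper, shown only to illustrate the data layout.
    r = redis.StrictRedis.from_url(redis_url)
    # topic / qa candidates are plain lists built with rpush
    topics = [i.decode("utf-8") for i in r.lrange("coldstart:light:clinic:beauty:topic:queue", 0, n - 1)]
    qas = [i.decode("utf-8") for i in r.lrange("coldstart:light:clinic:beauty:qa:queue", 0, n - 1)]
    # diary candidates live in one hash, keyed by city_tag_id; each value is a JSON-encoded list
    raw = r.hget("coldstart:light:clinic:beauty:diary:queue", city_tag_id)
    diaries = json.loads(raw) if raw else []
    return {"topic": topics, "qa": qas, "diary": diaries[:n]}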
@@ -142,41 +142,42 @@ if __name__ == "__main__":
# Clear historical data
redis_client = redis.StrictRedis.from_url('redis://:ReDis!GmTx*0aN9@172.16.40.173:6379')
hot_search_word_topic_queue_key = "coldstart:hot:search:word:topic:queue"
hot_search_word_qa_queue_key = "coldstart:hot:search:word:qa:queue"
hot_search_word_diary_queue_key = "coldstart:hot:search:word:diary:queue"
# hot_search_word_topic_queue_key = "coldstart:hot:search:word:topic:queue"
# hot_search_word_qa_queue_key = "coldstart:hot:search:word:qa:queue"
# hot_search_word_diary_queue_key = "coldstart:hot:search:word:diary:queue"
light_clinic_beauty_topic_queue_key = "coldstart:light:clinic:beauty:topic:queue"
light_clinic_beauty_qa_queue_key = "coldstart:light:clinic:beauty:qa:queue"
light_clinic_beauty_diary_queue_key = "coldstart:light:clinic:beauty:diary:queue"
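# Cold-start candidate queues: topic/qa are list keys, diary is a hash keyed by city_tag_id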
card_types = ['topic', 'qa'] # diaries span 400+ cities and are stored via hmset, so they don't need to be reset
word_refers = ['coldstart:hot:search:word', 'coldstart:light:clinic:beauty']
# word_refers = ['coldstart:hot:search:word', 'coldstart:light:clinic:beauty']
word_refers = ['coldstart:light:clinic:beauty']
for card_type in card_types:
for word_refer in word_refers:
key = word_refer + ':' + card_type + ':' + 'queue'
redis_client.delete(key)
# Candidate queues for hot search words
hot_search_word_key = "user:service_coldstart_tags2_name"
hot_search_word = redis_client.hgetall(hot_search_word_key)
hot_search_word = [str(tag, 'utf-8') for tag in hot_search_word]
hot_search_word_diary_queue = dict()
# Store the queues
hot_search_word_topic_queue = search_topic_by_match_phrase(hot_search_word)
redis_client.rpush(hot_search_word_topic_queue_key, *hot_search_word_topic_queue)
# redis_client.lrange(hot_search_word_topic_queue_key, 0, 3)
print("热搜词更新的帖子队列长度:%s" % str(len(hot_search_word_topic_queue)))
hot_search_word_qa_queue = search_qa_by_match_phrase(hot_search_word)
redis_client.rpush(hot_search_word_qa_queue_key, *hot_search_word_qa_queue)
print("热搜词更新的问答队列长度:%s" % str(len(hot_search_word_qa_queue)))
# redis_client.lrange(hot_search_word_qa_queue_key, 0, 3)
for city_tag_id in all_city_tag_id:
diary_queue = search_diary_by_match_phrase(hot_search_word, city_tag_id)
hot_search_word_diary_queue.update({city_tag_id: json.dumps(diary_queue)})
redis_client.hmset(hot_search_word_diary_queue_key, hot_search_word_diary_queue)
print("热搜词更新的日记队列长度:%s" % str(len(diary_queue)))
# # Candidate queues for hot search words
# hot_search_word_key = "user:service_coldstart_tags2_name"
# hot_search_word = redis_client.hgetall(hot_search_word_key)
# hot_search_word = [str(tag, 'utf-8') for tag in hot_search_word]
# hot_search_word_diary_queue = dict()
#
# # Store the queues
# hot_search_word_topic_queue = search_topic_by_match_phrase(hot_search_word)
# redis_client.rpush(hot_search_word_topic_queue_key, *hot_search_word_topic_queue)
# # redis_client.lrange(hot_search_word_topic_queue_key, 0, 3)
# print("热搜词更新的帖子队列长度:%s" % str(len(hot_search_word_topic_queue)))
#
# hot_search_word_qa_queue = search_qa_by_match_phrase(hot_search_word)
# redis_client.rpush(hot_search_word_qa_queue_key, *hot_search_word_qa_queue)
# print("热搜词更新的问答队列长度:%s" % str(len(hot_search_word_qa_queue)))
# # redis_client.lrange(hot_search_word_qa_queue_key, 0, 3)
#
# for city_tag_id in all_city_tag_id:
# diary_queue = search_diary_by_match_phrase(hot_search_word, city_tag_id)
# hot_search_word_diary_queue.update({city_tag_id: json.dumps(diary_queue)})
# redis_client.hmset(hot_search_word_diary_queue_key, hot_search_word_diary_queue)
# print("热搜词更新的日记队列长度:%s" % str(len(diary_queue)))
# Candidate queues for light medical aesthetics
light_clinic_beauty_key = "user:service_coldstart_tags3"
......