Commit 076eb1c3 authored by 段英荣

modify

parent 0f35bf68
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import redis
from django.conf import settings
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import RPCDFaultException
from gm_types.doris.error import ERROR
from raven.contrib.django.raven_compat.models import client as _sentry_client
......
# coding=utf-8
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
import six
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import django
......
# coding=utf-8
"""
__author__ = 'xumingming'
"""
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
......
......@@ -88,7 +88,7 @@ class TopicUtils(object):
return []
@classmethod
def get_recommend_topic_ids(cls,user_id,offset,size,is_first_time,group_topic_ids_index,not_group_topic_ids_index,query=None):
def get_recommend_topic_ids(cls,user_id,offset,size,is_first_time,query=None):
"""
:TODO: interleaving (shuffle) logic still needs to be added
:remark: fetch the home page recommended topic list
......@@ -96,48 +96,40 @@ class TopicUtils(object):
:param offset:
:param size:
:param is_first_time:
:param group_topic_ids_index:
:param not_group_topic_ids_index:
:return:
"""
try:
# override offset and size
size = 1000
if is_first_time:
offset=0
else:
if group_topic_ids_index>0:
offset = 1000 * group_topic_ids_index
else:
offset = 1000 * not_group_topic_ids_index
attention_user_id_term_list = list()
pick_user_id_term_list = list()
same_group_user_id_term_list = list()
user_tag_list = list()
result_dict = TopicUtils.get_related_user_info(user_id, 0, 1)
if len(result_dict["hits"]) == 0:
logging.error("not find user_id:%d in es!" % int(user_id))
return ([],[])
logging.warning("not find user_id:%d in es!" % int(user_id))
else:
attention_user_info_list = result_dict["hits"][0]["_source"]["attention_user_id_list"]
(attention_chinese_user_id_list, attention_japan_user_id_list,
attention_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(
related_user_id_list=attention_user_info_list)
logging.info("duan add,result_dict:%s" % str(result_dict).encode("utf-8"))
attention_user_info_list = result_dict["hits"][0]["_source"]["attention_user_id_list"]
(attention_chinese_user_id_list, attention_japan_user_id_list,
attention_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(
related_user_id_list=attention_user_info_list)
pick_user_info_list = result_dict["hits"][0]["_source"]["pick_user_id_list"]
(pick_chinese_user_id_list, pick_japan_user_id_list,
pick_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(pick_user_info_list)
pick_user_info_list = result_dict["hits"][0]["_source"]["pick_user_id_list"]
(pick_chinese_user_id_list, pick_japan_user_id_list,
pick_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(pick_user_info_list)
same_group_user_info_list = result_dict["hits"][0]["_source"]["same_group_user_id_list"]
(same_group_chinese_user_id_list, same_group_japan_user_id_list,
same_group_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(same_group_user_info_list)
same_group_user_info_list = result_dict["hits"][0]["_source"]["same_group_user_id_list"]
(same_group_chinese_user_id_list, same_group_japan_user_id_list,
same_group_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(same_group_user_info_list)
user_tag_list = result_dict["hits"][0]["_source"]["tag_list"]
user_tag_list = result_dict["hits"][0]["_source"]["tag_list"]
attention_user_id_term_list = cls.___get_should_term_list(attention_chinese_user_id_list + attention_japan_user_id_list + attention_korea_user_id_list)
pick_user_id_term_list = cls.___get_should_term_list(pick_chinese_user_id_list + pick_japan_user_id_list + pick_korea_user_id_list)
same_group_user_id_term_list = cls.___get_should_term_list(same_group_chinese_user_id_list + same_group_japan_user_id_list + same_group_korea_user_id_list)
q = dict()
q["query"] = dict()
attention_user_id_term_list = cls.___get_should_term_list(attention_chinese_user_id_list + attention_japan_user_id_list + attention_korea_user_id_list)
pick_user_id_term_list = cls.___get_should_term_list(pick_chinese_user_id_list + pick_japan_user_id_list + pick_korea_user_id_list)
same_group_user_id_term_list = cls.___get_should_term_list(same_group_chinese_user_id_list + same_group_japan_user_id_list + same_group_korea_user_id_list)
functions_list = [
{
"filter": {"bool": {
......@@ -212,25 +204,28 @@ class TopicUtils(object):
logging.info("duan add,es query:%s" % str(q).encode("utf-8"))
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic", query_body=q,
offset=offset, size=size)
group_topic_ids = list()
not_group_topic_ids = list()
"""
group_topic_ids = list()
not_group_topic_ids = list()
q["filter"] = {
"range": {"content_level": {"gte": 3, "lte": 5}}
}
"""
for item in result_dict["hits"]:
if item["_source"]["group_id"] and item["_source"]["group_id"]> 0:
group_topic_ids.append(item["_source"]["id"])
else:
not_group_topic_ids.append(item["_source"]["id"])
return (group_topic_ids,not_group_topic_ids)
"""
if len(result_dict["hits"])>0:
return [item["_source"] for item in result_dict["hits"]]
else:
return []
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ([],[])
return []
@classmethod
def get_topic_detail_recommend_list(cls,user_id,topic_tag_list,topic_group_id,topic_user_id,offset,size):
......
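For reference, a minimal caller sketch (not code from this repository; the import path and literal values are assumptions inferred from this commit): after this change get_recommend_topic_ids returns a flat list of Elasticsearch "_source" documents, or an empty list on error, instead of a (group_topic_ids, not_group_topic_ids) tuple, so a caller only has to project the ids.

# Hypothetical usage sketch; import path assumed from the sibling
# "from search.utils.user import UserUtils" import later in this commit.
from search.utils.topic import TopicUtils

user_id, offset, size = -1, 0, 10  # sample values, matching home_recommend defaults
topic_sources = TopicUtils.get_recommend_topic_ids(user_id, offset, size, True)
# Each element is the raw "_source" dict of an ES hit; only the ids are exposed.
recommend_topic_ids = [doc["id"] for doc in topic_sources]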
......@@ -11,44 +11,53 @@ from libs.cache import redis_client
@bind("physical/search/home_recommend")
def home_recommend(user_id=-1,offset=0,size=10):
def home_recommend(ctx,user_id=-1,offset=0,size=10):
"""
:remark: home page recommendation; currently only diary topics are recommended, and the group interleaving (shuffle) strategy is still missing
:return:
"""
try:
logging.info("duan add,ctx:%s" % str(ctx))
if not user_id:
user_id=-1
redis_key = "physical:home_recommend:" + "user_id:" + str(user_id)
redis_val_dict = redis_client.hgetall(redis_key)
if len(redis_val_dict) > 0:
group_topic_ids = json.loads(redis_val_dict[b"group_topic_ids"])
not_group_topic_ids = json.loads(redis_val_dict[b"not_group_topic_ids"])
group_topic_ids_index = int(redis_val_dict[b"group_topic_ids_index"])
not_group_topic_ids_index = int(redis_val_dict[b"not_group_topic_ids_index"])
if len(group_topic_ids) < (size-1) or len(not_group_topic_ids) < 1:
if len(group_topic_ids) < (size-1):
group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
group_topic_ids += new_group_topic_ids
else:
not_group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
not_group_topic_ids += new_not_group_topic_ids
recommend_topic_ids = group_topic_ids[:(size-1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size-1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index":group_topic_ids_index,
"not_group_topic_ids_index":not_group_topic_ids_index
}
TopicUtils.refresh_redis_hash_data(redis_client,redis_key,redis_hash_dict)
return {"recommend_topic_ids":recommend_topic_ids}
#if len(redis_val_dict) > 0:
"""
group_topic_ids = json.loads(redis_val_dict[b"group_topic_ids"])
not_group_topic_ids = json.loads(redis_val_dict[b"not_group_topic_ids"])
group_topic_ids_index = int(redis_val_dict[b"group_topic_ids_index"])
not_group_topic_ids_index = int(redis_val_dict[b"not_group_topic_ids_index"])
if len(group_topic_ids) < (size-1) or len(not_group_topic_ids) < 1:
if len(group_topic_ids) < (size-1):
group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
group_topic_ids += new_group_topic_ids
else:
not_group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
not_group_topic_ids += new_not_group_topic_ids
"""
have_read_topic_ids_list = json.loads(redis_val_dict[b"have_read_topic_ids"])
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size,False)
"""
recommend_topic_ids = group_topic_ids[:(size-1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size-1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index":group_topic_ids_index,
"not_group_topic_ids_index":not_group_topic_ids_index
}
TopicUtils.refresh_redis_hash_data(redis_client,redis_key,redis_hash_dict)
"""
recommend_topic_ids = [item["id"] for item in topic_id_list]
return {"recommend_topic_ids":recommend_topic_ids}
"""
else:
(group_topic_ids,not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id,offset,size,True,0,0)
recommend_topic_ids = group_topic_ids[:(size - 1)] + not_group_topic_ids[:1]
......@@ -60,7 +69,9 @@ def home_recommend(user_id=-1,offset=0,size=10):
}
TopicUtils.refresh_redis_hash_data(redis_client, redis_key, redis_hash_dict)
return {"recommend_topic_ids": recommend_topic_ids}
return {"recommend_topic_ids": recommend_topic_ids}
"""
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
......
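A side note on the redis_client.hgetall usage above: redis-py returns hash keys and values as raw bytes unless the client was created with decode_responses=True, which is why the handler indexes the result with b"have_read_topic_ids" and json-decodes the value. A minimal standalone sketch, assuming a plain redis.StrictRedis connection on localhost rather than the project's libs.cache client:

import json
import redis

r = redis.StrictRedis(host="localhost", port=6379, db=0)  # assumed connection settings
redis_key = "physical:home_recommend:" + "user_id:" + str(-1)  # same key shape as the handler
redis_val_dict = r.hgetall(redis_key)  # mapping of bytes keys to bytes values
if len(redis_val_dict) > 0:
    have_read_topic_ids = json.loads(redis_val_dict[b"have_read_topic_ids"])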
......@@ -11,7 +11,6 @@ from libs.cache import redis_client
from search.utils.user import UserUtils
@bind("physical/search/recommend_user")
def recommend_user(user_id=-1,offset=0,size=10):
"""
......