Commit db0591f2 authored by 段英荣

modify

parent 7641a9e5
@@ -88,7 +88,7 @@ class TopicUtils(object):
return []
@classmethod
def get_recommend_topic_ids(cls,user_id,offset,size,is_first_time,query=None):
def get_recommend_topic_ids(cls,user_id,offset,size,query=None):
"""
:TODO: diversification (shuffle) logic still needs to be added
:remark: fetch the list of recommended topic ids for the home page
@@ -244,17 +244,21 @@ class TopicUtils(object):
q["query"] = dict()
functions_list = [
{
"filter": {"term": {
"group_id": topic_group_id}},
"weight": 1,
},
{
"filter": {"term": {
"user_id": topic_user_id}},
"weight": 1000
}
]
if isinstance(topic_group_id,int) and topic_group_id > 0:
functions_list.append(
{
"filter": {"term": {
"group_id": topic_group_id}},
"weight": 1,
}
)
query_tag_term_list = cls.___get_should_term_list(topic_tag_list)
query_function_score = {
"query":{
......
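Why the conditional: the previous functions list always pushed a `group_id` term filter, even for topics whose `group_id` is missing or non-positive, while the new code only appends the group boost when `topic_group_id` is a positive integer; the author boost on `user_id` (weight 1000) stays unconditional. A minimal sketch of how that function_score functions list could be assembled, using only the field names and weights visible in the diff (the helper name itself is illustrative):

```python
def build_function_score_functions(topic_user_id, topic_group_id=None):
    """Build the function_score "functions" list: always boost the topic
    author, and only add the group boost when a valid group id exists."""
    functions_list = [
        {
            # strong boost for topics written by the same author
            "filter": {"term": {"user_id": topic_user_id}},
            "weight": 1000,
        }
    ]
    # guard against topics that belong to no group (group_id is None or <= 0)
    if isinstance(topic_group_id, int) and topic_group_id > 0:
        functions_list.append(
            {
                "filter": {"term": {"group_id": topic_group_id}},
                "weight": 1,
            }
        )
    return functions_list
```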
@@ -11,7 +11,7 @@ from libs.cache import redis_client
@bind("physical/search/home_recommend")
def home_recommend(user_id=-1,offset=0,size=10):
def home_recommend(session_id="",user_id=-1,offset=0,size=10):
"""
:remark: home-page recommendation; currently only diary topics are recommended, and the per-group diversification (shuffle) strategy is still missing
:return:
@@ -19,65 +19,52 @@ def home_recommend(user_id=-1,offset=0,size=10):
try:
if not user_id:
user_id=-1
redis_key = "physical:home_recommend:" + "user_id:" + str(user_id)
redis_key = "physical:home_recommend:" + "user_id:" + str(user_id) + "session_id:" + session_id
redis_val_dict = redis_client.hgetall(redis_key)
#if len(redis_val_dict) > 0:
"""
group_topic_ids = json.loads(redis_val_dict[b"group_topic_ids"])
not_group_topic_ids = json.loads(redis_val_dict[b"not_group_topic_ids"])
group_topic_ids_index = int(redis_val_dict[b"group_topic_ids_index"])
not_group_topic_ids_index = int(redis_val_dict[b"not_group_topic_ids_index"])
if len(group_topic_ids) < (size-1) or len(not_group_topic_ids) < 1:
if len(group_topic_ids) < (size-1):
group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
group_topic_ids += new_group_topic_ids
else:
not_group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index)
not_group_topic_ids += new_not_group_topic_ids
"""
#have_read_topic_ids_list = json.loads(redis_val_dict[b"have_read_topic_ids"])
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size,False)
recommend_topic_ids = []
"""
recommend_topic_ids = group_topic_ids[:(size-1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size-1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index":group_topic_ids_index,
"not_group_topic_ids_index":not_group_topic_ids_index
}
TopicUtils.refresh_redis_hash_data(redis_client,redis_key,redis_hash_dict)
"""
recommend_topic_ids = [item["id"] for item in topic_id_list]
return {"recommend_topic_ids":recommend_topic_ids}
size = size*size
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size)
have_read_group_id_tuple = set()
unread_topic_id_list = list()
have_read_topic_id_tuple = set()
"""
if len(redis_val_dict)>0:
topic_id_list = redis_val_dict["unread_topic_id"] + topic_id_list
have_read_topic_id_tuple = set(redis_val_dict["have_read_topic_id"])
for item in topic_id_list:
if item["group_id"] in have_read_group_id_tuple:
unread_topic_id_list.append(item["id"])
else:
(group_topic_ids,not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id,offset,size,True,0,0)
recommend_topic_ids = group_topic_ids[:(size - 1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size - 1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index": 0,
"not_group_topic_ids_index": 0
if item["id"] not in have_read_topic_id_tuple:
recommend_topic_ids.append(item["id"])
if isinstance(item["group_id"],int) and item["group_id"]>0:
have_read_group_id_tuple.add(item["group_id"])
have_read_topic_id_tuple.add(item["id"])
if len(recommend_topic_ids) >= size:
break
if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
recommend_len = len(recommend_topic_ids)
recommend_topic_ids = recommend_topic_ids + unread_topic_id_list[:(size-recommend_len)]
unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
if len(unread_topic_id_list)>0:
redis_dict = {
"unread_topic_id":unread_topic_id_list,
"have_read_topic_id":have_read_topic_id_tuple
}
TopicUtils.refresh_redis_hash_data(redis_client, redis_key, redis_hash_dict)
return {"recommend_topic_ids": recommend_topic_ids}
"""
redis_client.hmset(redis_key,redis_dict)
return {"recommend_topic_ids":recommend_topic_ids}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
@bind("physical/search/home_query")
def home_query(user_id=-1,query="",offset=0,size=10):
def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
"""
:remark: home-page search ranking; results are not diversified yet; search logic optimized
:param query:
@@ -88,48 +88,45 @@ def home_query(user_id=-1,query="",offset=0,size=10):
try:
if not user_id:
user_id=-1
redis_key = "physical:home_query:" + "user_id:" + str(user_id)
redis_key = "physical:home_query:" + "user_id:" + str(user_id) + "session_id:" + session_id
redis_val_dict = redis_client.hgetall(redis_key)
if len(redis_val_dict) > 0:
group_topic_ids = json.loads(redis_val_dict[b"group_topic_ids"])
not_group_topic_ids = json.loads(redis_val_dict[b"not_group_topic_ids"])
group_topic_ids_index = int(redis_val_dict[b"group_topic_ids_index"])
not_group_topic_ids_index = int(redis_val_dict[b"not_group_topic_ids_index"])
if len(group_topic_ids) < (size-1) or len(not_group_topic_ids) < 1:
if len(group_topic_ids) < (size-1):
group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index,query)
group_topic_ids += new_group_topic_ids
else:
not_group_topic_ids_index += 1
(new_group_topic_ids, new_not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id, offset, size,
False, group_topic_ids_index, not_group_topic_ids_index,query)
not_group_topic_ids += new_not_group_topic_ids
recommend_topic_ids = group_topic_ids[:(size-1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size-1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index":group_topic_ids_index,
"not_group_topic_ids_index":not_group_topic_ids_index
}
TopicUtils.refresh_redis_hash_data(redis_client,redis_key,redis_hash_dict)
return {"recommend_topic_ids":recommend_topic_ids}
recommend_topic_ids = []
size = size*size
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size,query)
have_read_group_id_tuple = set()
unread_topic_id_list = list()
have_read_topic_id_tuple = set()
if len(redis_val_dict)>0:
topic_id_list = redis_val_dict["unread_topic_id"] + topic_id_list
have_read_topic_id_tuple = set(redis_val_dict["have_read_topic_id"])
for item in topic_id_list:
if item["group_id"] in have_read_group_id_tuple:
unread_topic_id_list.append(item["id"])
else:
(group_topic_ids,not_group_topic_ids) = TopicUtils.get_recommend_topic_ids(user_id,offset,size,True,0,0,query)
recommend_topic_ids = group_topic_ids[:(size - 1)] + not_group_topic_ids[:1]
redis_hash_dict = {
"group_topic_ids": group_topic_ids[(size - 1):],
"not_group_topic_ids": not_group_topic_ids[1:],
"group_topic_ids_index": 0,
"not_group_topic_ids_index": 0
if item["id"] not in have_read_topic_id_tuple:
recommend_topic_ids.append(item["id"])
if isinstance(item["group_id"],int) and item["group_id"]>0:
have_read_group_id_tuple.add(item["group_id"])
have_read_topic_id_tuple.add(item["id"])
if len(recommend_topic_ids) >= size:
break
if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
recommend_len = len(recommend_topic_ids)
recommend_topic_ids = recommend_topic_ids + unread_topic_id_list[:(size-recommend_len)]
unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
if len(unread_topic_id_list)>0:
redis_dict = {
"unread_topic_id":unread_topic_id_list,
"have_read_topic_id":have_read_topic_id_tuple
}
TopicUtils.refresh_redis_hash_data(redis_client, redis_key, redis_hash_dict)
redis_client.hmset(redis_key,redis_dict)
return {"recommend_topic_ids": recommend_topic_ids}
return {"recommend_topic_ids":recommend_topic_ids}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
......
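Both endpoints now scope their Redis state to the request session: the hash key includes the new `session_id` in addition to `user_id`, and the leftover `unread_topic_id` / `have_read_topic_id` state is written back with `hmset`. Redis hash fields hold flat strings only, so composite values are normally JSON-encoded on write and decoded after `hgetall` (which returns bytes keys, as the `b"..."` lookups in the diff show); `hmset` is also deprecated in newer redis-py in favor of `hset(name, mapping=...)`. A hedged sketch of that round trip, with an illustrative key layout (the diff concatenates the key parts without a separator) and an optional TTL that is not in the diff:

```python
import json


def save_session_state(redis_client, user_id, session_id, unread_topic_ids, have_read_topic_ids):
    """Persist per-session pagination state under a session-scoped key.
    Hash values must be flat strings, so lists/sets are JSON-encoded here
    (the diff writes the raw Python objects directly)."""
    redis_key = "physical:home_recommend:user_id:%s:session_id:%s" % (user_id, session_id)
    redis_client.hmset(redis_key, {
        "unread_topic_id": json.dumps(list(unread_topic_ids)),
        "have_read_topic_id": json.dumps(list(have_read_topic_ids)),
    })
    redis_client.expire(redis_key, 30 * 60)  # assumed TTL so stale sessions age out


def load_session_state(redis_client, user_id, session_id):
    """Read the state back; hgetall returns bytes keys and bytes values."""
    redis_key = "physical:home_recommend:user_id:%s:session_id:%s" % (user_id, session_id)
    raw = redis_client.hgetall(redis_key)
    if not raw:
        return [], set()
    unread_topic_ids = json.loads(raw[b"unread_topic_id"])
    have_read_topic_ids = set(json.loads(raw[b"have_read_topic_id"]))
    return unread_topic_ids, have_read_topic_ids
```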
@@ -24,7 +24,11 @@ class TopicTransfer(object):
res["content"] = instance.content
res["content_level"] = instance.content_level
res["user_id"] = instance.user_id
res["group_id"] = instance.group_id
if instance.group:
res["group_id"] = instance.group.id
else:
res["group_id"] = -1
res["share_num"] = instance.share_num
res["pick_id_list"] = instance.get_pick_id_info()
......
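The `TopicTransfer` change stops reading `instance.group_id` directly and falls back to `-1` when the topic has no group, so serialization no longer depends on the foreign key being set. The same guard can be written compactly with `getattr`; a small sketch, assuming `instance.group` is an optional related object as in the diff:

```python
def safe_group_id(instance, default=-1):
    """Return the related group's id, or a sentinel when the topic
    has no group attached (mirrors the `if instance.group` guard)."""
    group = getattr(instance, "group", None)
    return group.id if group else default
```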