Commit 3da79895 authored by 段英荣

Merge branch 'dev' into 'master'

modify

See merge request !50
parents 3e2ef3de 5a1b98d0
......@@ -55,7 +55,6 @@ class PickUtils(object):
:return:
"""
try:
filter_term_list = cls.___get_filter_term_list(have_read_pick_celebrity_ids)
q = dict()
q["query"] = {
"function_score":{
......@@ -141,20 +140,7 @@ class PickUtils(object):
def get_celebrity_pick_query(cls,query,pick_id,offset,size):
try:
q = dict()
"""
multi_fields = {
'name': 2,
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
"""
q["query"] = {
#"multi_match":multi_match,
"match":{
"name":query
},
......
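With the commented-out `cross_fields` multi_match dropped, `get_celebrity_pick_query` matches only the `name` field. A sketch of the resulting query body; the `from`/`size` pagination is inferred from the `(query, pick_id, offset, size)` signature rather than shown in the hunk:

```python
# Sketch of the body get_celebrity_pick_query now builds: a plain match on
# "name" instead of the removed cross_fields multi_match. Pagination via
# from/size is an assumption based on the method signature.
def build_celebrity_pick_query(query, offset, size):
    q = dict()
    q["query"] = {
        "match": {"name": query}
    }
    q["from"] = offset
    q["size"] = size
    return q
```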
......@@ -99,9 +99,9 @@ class TopicUtils(object):
:return:
"""
try:
attention_user_id_term_list = list()
pick_user_id_term_list = list()
same_group_user_id_term_list = list()
attention_user_id_list = list()
pick_user_id_list = list()
same_group_id_list = list()
user_tag_list = list()
result_dict = TopicUtils.get_related_user_info(user_id, 0, 1)
......@@ -110,31 +110,18 @@ class TopicUtils(object):
else:
attention_user_info_list = result_dict["hits"][0]["_source"]["attention_user_id_list"]
attention_user_id_list = [item["user_id"] for item in attention_user_info_list]
"""
(attention_chinese_user_id_list, attention_japan_user_id_list,
attention_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(
related_user_id_list=attention_user_info_list)
"""
pick_user_info_list = result_dict["hits"][0]["_source"]["pick_user_id_list"]
pick_user_id_list = [item["user_id"] for item in pick_user_info_list]
"""
(pick_chinese_user_id_list, pick_japan_user_id_list,
pick_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(pick_user_info_list)
"""
same_group_user_info_list = result_dict["hits"][0]["_source"]["same_group_user_id_list"]
same_group_id_list = [item["user_id"] for item in same_group_user_info_list]
"""
(same_group_chinese_user_id_list, same_group_japan_user_id_list,
same_group_korea_user_id_list) = TopicUtils.analyze_related_user_id_list(same_group_user_info_list)
"""
user_tag_list = result_dict["hits"][0]["_source"]["tag_list"]
attention_user_id_term_list = cls.___get_should_term_list(attention_user_id_list,field_name="user_id")
pick_user_id_term_list = cls.___get_should_term_list(pick_user_id_list,field_name="user_id")
same_group_user_id_term_list = cls.___get_should_term_list(same_group_id_list,field_name="user_id")
# attention_user_id_term_list = cls.___get_should_term_list(attention_user_id_list,field_name="user_id")
# pick_user_id_term_list = cls.___get_should_term_list(pick_user_id_list,field_name="user_id")
# same_group_user_id_term_list = cls.___get_should_term_list(same_group_id_list,field_name="user_id")
q = dict()
q["query"] = dict()
......@@ -150,37 +137,37 @@ class TopicUtils(object):
}
]
if len(attention_user_id_term_list)>0:
if len(attention_user_id_list)>0:
functions_list.append(
{
"filter": {"bool": {
"should": attention_user_id_term_list}},
"should": {"terms":{"user_id":attention_user_id_list}}}},
"weight": 3,
}
)
if len(pick_user_id_term_list)>0:
if len(pick_user_id_list)>0:
functions_list.append(
{
"filter": {"bool": {
"should": pick_user_id_term_list}},
"should": {"terms":{"user_id":pick_user_id_list}}}},
"weight": 2
}
)
if len(same_group_user_id_term_list)>0:
if len(same_group_id_list)>0:
functions_list.append(
{
"filter": {"bool": {
"should": same_group_user_id_term_list}},
"should": {"terms":{"user_id":same_group_id_list}}}},
"weight": 1
}
)
query_tag_term_list = cls.___get_should_term_list(user_tag_list)
if len(query_tag_term_list)>0:
# query_tag_term_list = cls.___get_should_term_list(user_tag_list)
if len(user_tag_list)>0:
functions_list.append(
{
"filter":{"bool":{
"should":query_tag_term_list}},
"should":{"terms":{"tag_list":user_tag_list}}}},
"weight": 1
}
)
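Each weighted function above now filters with a single `terms` clause instead of a `should` list of per-id `term` clauses built by `___get_should_term_list`. Assuming that helper simply expanded an id list into `term` clauses, the two forms match the same documents; a small sketch of the before/after shape:

```python
# Example ids; in the real query these come from get_related_user_info.
attention_user_id_list = [101, 102, 103]

# Before: the helper expanded every id into its own term clause.
attention_user_id_term_list = [
    {"term": {"user_id": uid}} for uid in attention_user_id_list
]
old_filter = {"bool": {"should": attention_user_id_term_list}}

# After: one terms clause carries the whole id list.
new_filter = {"bool": {"should": {"terms": {"user_id": attention_user_id_list}}}}

# Both filters match documents whose user_id is any value in the list;
# the terms form avoids building and shipping one clause per id.
```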
......@@ -273,11 +260,10 @@ class TopicUtils(object):
}
)
query_tag_term_list = cls.___get_should_term_list(topic_tag_list)
# query_tag_term_list = cls.___get_should_term_list(topic_tag_list)
query_function_score = {
"query":{
"bool":{
"should": query_tag_term_list,
"must": {
"range": {"content_level": {"gte": 3, "lte": 5}}
},
......@@ -292,6 +278,13 @@ class TopicUtils(object):
"boost_mode": "sum",
"functions": functions_list
}
if len(topic_tag_list)>0:
query_function_score["query"]["bool"]["should"]={
"terms":{
"tag_list":topic_tag_list
}
}
q["query"]["function_score"] = query_function_score
q["_source"] = {
"include":["id","group_id","user_id","_score"]
......
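The hunk above finishes assembling the `function_score` query: the `range` on `content_level` stays in `must`, the tag `should` is attached only when `topic_tag_list` is non-empty, and with `"boost_mode": "sum"` each matching weighted filter adds its weight to the relevance score. A minimal sketch of executing such a query body, assuming the standard elasticsearch-py client; the host and the index name `topic` are placeholders, and the project may route this through its own ES helper instead:

```python
from elasticsearch import Elasticsearch

# Placeholder connection; host and index name are assumptions.
es = Elasticsearch(["http://localhost:9200"])

def search_recommended_topics(q, offset=0, size=10):
    # q is the dict built above, with q["query"]["function_score"] populated
    # and q["_source"] limited to ["id", "group_id", "user_id", "_score"].
    q["from"] = offset
    q["size"] = size
    return es.search(index="topic", body=q)
```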
......@@ -70,7 +70,7 @@ class UserUtils(object):
q = dict()
q["query"] = dict()
recursion_attention_user_list = cls.___get_should_term_list(recursion_attention_user_id_list,field_name="user_id")
# recursion_attention_user_list = cls.___get_should_term_list(recursion_attention_user_id_list,field_name="user_id")
functions_list = [
{
......@@ -84,12 +84,12 @@ class UserUtils(object):
}
]
if len(recursion_attention_user_list) > 0:
if len(recursion_attention_user_id_list) > 0:
functions_list.append(
{
"filter":{
"bool":{
"should":recursion_attention_user_list
"should":{"terms":{"user_id":recursion_attention_user_id_list}}
}
},
"weight":10
......
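UserUtils follows the same refactor, and the guard now checks the raw `recursion_attention_user_id_list` rather than an expanded term list. A hypothetical helper (not part of this merge request) that captures the repeated pattern of appending a weighted `terms` filter only when the id list is non-empty, since an empty list can never match and would only bloat the query:

```python
# Hypothetical helper illustrating the shared pattern across PickUtils,
# TopicUtils and UserUtils: skip empty id lists entirely.
def append_terms_function(functions_list, field_name, values, weight):
    if values:
        functions_list.append({
            "filter": {"bool": {"should": {"terms": {field_name: values}}}},
            "weight": weight,
        })

functions_list = []
append_terms_function(functions_list, "user_id", [1, 2, 3], 10)
append_terms_function(functions_list, "user_id", [], 5)  # skipped: empty list
```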
......@@ -117,70 +117,6 @@ def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
:return:
"""
try:
"""
if not user_id:
user_id=-1
if not isinstance(session_id,str):
session_id = ""
redis_key = "physical:home_query" + ":user_id:" + str(user_id) + ":session_id:" + session_id + ":query:" + str(query)
redis_val_dict = redis_client.hgetall(redis_key)
last_offset_num = int(redis_val_dict[b"last_offset_num"]) if b"last_offset_num" in redis_val_dict else -1
recommend_topic_ids = []
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size,query)
have_read_group_id_tuple = set()
unread_topic_id_list = list()
have_read_topic_id_set = set()
# topics already read by users who are not logged in
unregister_user_have_redis_topic_id_dict = dict()
if len(redis_val_dict)>0 and offset>0:
if user_id>0 and offset==last_offset_num:
topic_id_list = json.loads(redis_val_dict[b"unread_topic_id"]) + topic_id_list
have_read_topic_id_set = set(json.loads(redis_val_dict[b"have_read_topic_id"]))
elif user_id==-1:
unregister_user_have_redis_topic_id_dict = json.loads(redis_val_dict[b"have_read_topic_id"])
for page_id in unregister_user_have_redis_topic_id_dict:
if int(page_id)<offset:
have_read_topic_id_set.union(set(unregister_user_have_redis_topic_id_dict[page_id]))
for item in topic_id_list:
if item["group_id"] in have_read_group_id_tuple:
unread_topic_id_list.append(item)
else:
if item["id"] not in have_read_topic_id_set:
recommend_topic_ids.append(item["id"])
if isinstance(item["group_id"],int) and item["group_id"]>0:
have_read_group_id_tuple.add(item["group_id"])
have_read_topic_id_set.add(item["id"])
if len(recommend_topic_ids) >= size:
break
if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
recommend_len = len(recommend_topic_ids)
offi_unread_topic_id = [item["id"] for item in unread_topic_id_list[:(size-recommend_len)]]
recommend_topic_ids = recommend_topic_ids + offi_unread_topic_id
unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
redis_have_redis_data = ""
if user_id>0:
redis_have_redis_data = json.dumps(list(have_read_topic_id_set))
else:
unregister_user_have_redis_topic_id_dict[offset] = recommend_topic_ids
redis_have_redis_data = json.dumps(unregister_user_have_redis_topic_id_dict)
redis_dict = {
"unread_topic_id":json.dumps(unread_topic_id_list),
"have_read_topic_id":redis_have_redis_data,
"last_offset_num":offset+size
}
redis_client.hmset(redis_key,redis_dict)
# keep each session key for 15 minutes
redis_client.expire(redis_key,15*60*60)
"""
if not user_id:
user_id=-1
if not isinstance(session_id,str):
......
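The block removed above (already commented out) tracked which topic ids a `(user_id, session_id, query)` combination had already seen, so that later pages of `home_query` could skip them. A compact, hedged sketch of that Redis pattern using redis-py; the connection details are placeholders and the flow is simplified to the write side only:

```python
import json
from redis import StrictRedis

# Placeholder connection; the project configures its own redis_client.
redis_client = StrictRedis(host="localhost", port=6379)

def remember_read_topics(user_id, session_id, query, offset, size,
                         unread_topic_id_list, have_read_topic_id_set):
    # Key layout mirrors the removed code:
    # physical:home_query:user_id:<id>:session_id:<sid>:query:<q>
    redis_key = ("physical:home_query:user_id:" + str(user_id) +
                 ":session_id:" + session_id + ":query:" + str(query))
    # hset(..., mapping=...) is the modern replacement for the hmset call
    # used in the removed code.
    redis_client.hset(redis_key, mapping={
        "unread_topic_id": json.dumps(unread_topic_id_list),
        "have_read_topic_id": json.dumps(list(have_read_topic_id_set)),
        "last_offset_num": offset + size,
    })
    # The removed code passed 15 * 60 * 60 seconds (15 hours) here even though
    # its comment said 15 minutes; 15 * 60 is the 15-minute value.
    redis_client.expire(redis_key, 15 * 60)
```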